+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.HXwONk8wCs --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [7840/7840 modules configured] [2495/5201 modules rendered]
[2 ymakes processing] [7840/7840 modules configured] [5144/5201 modules rendered]
[2 ymakes processing] [7840/7840 modules configured] [5201/5201 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7846/7846 modules configured] [5201/5201 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |47.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |29.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |29.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Linker/libllvm14-lib-Linker.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |25.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm14/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |25.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |26.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |26.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |27.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |27.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |28.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IRReader/libllvm14-lib-IRReader.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libsan/liblibs-cxxsupp-libsan.global.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |28.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |28.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libsan/liblibs-cxxsupp-libsan.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |29.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |29.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/BinaryFormat/libllvm14-lib-BinaryFormat.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |29.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |29.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |30.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_common.cpp |30.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |30.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |30.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |31.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_unit.cpp |31.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |31.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/AsmParser/libllvm14-lib-AsmParser.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan/libclang_rt.asan-x86_64.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |31.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |31.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record.cpp |32.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/backup_restore_traits.h_serialized.cpp |32.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/_a5874f235d39dc6d1df389245e.yasm |32.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |32.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_cdc_serializer.cpp |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |32.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_traits.cpp |32.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |32.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |32.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_body_serializer.cpp |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |32.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/change_exchange.h_serialized.cpp |32.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |33.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender.cpp |33.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |33.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |33.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |33.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCParser/liblib-MC-MCParser.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx512/liblibs-hyperscan-runtime_avx512.a |32.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |32.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ProfileData/libllvm14-lib-ProfileData.a |33.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |33.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |34.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_read_table.cpp |34.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |34.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |34.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |34.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |34.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |34.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_failpoints.cpp |34.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |35.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |35.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__init.cpp |35.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm14/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |35.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |35.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |35.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |35.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |36.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |36.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |36.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |36.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |36.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |36.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |36.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |36.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |36.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |36.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |37.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |37.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |37.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |37.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |37.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |38.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__write.cpp |37.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |37.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Object/libllvm14-lib-Object.a |38.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |38.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |38.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |38.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |38.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |39.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |39.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |39.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |39.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |39.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |39.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |40.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |40.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |40.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |40.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |40.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |40.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |40.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |41.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |41.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |41.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |41.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector.cpp |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser_public/liblibrary-persqueue-topic_parser_public.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard.cpp |42.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |43.0%| PREPARE $(YMAKE_PYTHON3-4256832079) - 0 bytes |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |45.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/IR/libllvm14-lib-IR.a |47.1%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Demangle/libllvm14-lib-Demangle.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/libllvm14-lib-ExecutionEngine.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a 
|51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Remarks/libllvm14-lib-Remarks.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Analysis/libllvm14-lib-Analysis.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |53.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/config/libfq-libs-config.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/upload_stats.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |55.4%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/scan_common.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/sample_k.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/range_ops.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/reshuffle_kmeans.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/restore_unit.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/operation.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/local_kmeans.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_scan.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_common.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execution_unit.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/export_s3_buffer_zstd.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_table.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_iface.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_effects.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_lookup_table.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_delete_rows.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_s3_upload.h_serialized.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_raw.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit_kind.h_serialized.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/CodeGen/libllvm14-lib-CodeGen.a |55.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/follower_edge.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_validator.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_helpers.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_conflicts.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/kmeans_helper.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/probes.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/type_serialization.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_locks.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/extstorage_usage_config.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/import_s3.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |56.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/codec/codegen/libyt-codec-codegen.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/hash/libyt-lib-hash.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/log/libyt-lib-log.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/comp_nodes/llvm14/libyt-comp_nodes-llvm14.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_stats/libclient-impl-ydb_stats.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/resources/libcpp-client-resources.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |56.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_driver/libcpp-client-ydb_driver.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_common_client/libcpp-client-ydb_common_client.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/common/libproviders-yt-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/query_stats/libclient-ydb_table-query_stats.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_params/libcpp-client-ydb_params.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_discovery/libcpp-client-ydb_discovery.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/fatal_error_handlers/libclient-ydb_types-fatal_error_handlers.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_query/impl/libclient-ydb_query-impl.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_operation/libcpp-client-ydb_operation.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/libcpp-client-ydb_topic.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/libclient-ydb_types-credentials.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/exceptions/libclient-ydb_types-exceptions.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_ss_tasks/libcpp-client-ydb_ss_tasks.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/common/libclient-ydb_topic-common.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_result/libcpp-client-ydb_result.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/login/libydb_types-credentials-login.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |55.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/bsconfig/libydb-services-bsconfig.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/impl/libclient-ydb_persqueue_core-impl.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_proto/libcpp-client-ydb_proto.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/include/libclient-ydb_persqueue_public-include.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_common_client/impl/libclient-ydb_common_client-impl.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/iam_private/libcpp-client-iam_private.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/resources/libcpp-client-resources.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_import/libcpp-client-ydb_import.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/operation/libclient-ydb_types-operation.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/schema/libyt-lib-schema.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/job/libproviders-yt-job.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_rate_limiter/libcpp-client-ydb_rate_limiter.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/jwt/libpublic-lib-jwt.a |55.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/operation_id/libpublic-lib-operation_id.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/opt/libproviders-yt-opt.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/libcpp-client-ydb_table.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/codec/libproviders-yt-codec.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_coordination/libcpp-client-ydb_coordination.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_scheme/libcpp-client-ydb_scheme.a |57.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_query/libcpp-client-ydb_query.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_extension/libcpp-client-ydb_extension.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/status/libclient-ydb_types-status.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/libcpp-client-ydb_types.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_value/libcpp-client-ydb_value.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_export/libcpp-client-ydb_export.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_types/credentials/oauth2_token_exchange/libydb_types-credentials-oauth2_token_exchange.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_datastreams/libcpp-client-ydb_datastreams.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/comp_nodes/dq/libyt-comp_nodes-dq.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |57.3%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/native/libyt-gateway-native.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/impl/libclient-ydb_persqueue_public-impl.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/draft/libcpp-client-draft.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm14/libminikql-computation-llvm14.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/libessentials-parser-proto_ast.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |57.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm14/libminikql-codegen-llvm14.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_table/impl/libclient-ydb_table-impl.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a 
|57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/protos/liblib-operation_id-protos.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/impl/libclient-ydb_topic-impl.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/iam/common/libclient-iam-common.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a 
|57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/proto/libproviders-yt-proto.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |57.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/stat/uploader/libproviders-stat-uploader.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/codec/codegen/libyt-codec-codegen.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/async_io/libproviders-solomon-async_io.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |57.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |57.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/threading/future/libcpp-threading-future.a |57.9%| PREPARE $(PYTHON) - 0 bytes |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |57.9%| PREPARE $(VCS) - 0 bytes |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |57.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/MC/libllvm14-lib-MC.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |57.7%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |57.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |58.0%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |58.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/raw_client/libcpp-mapreduce-raw_client.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |53.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Support/libllvm14-lib-Support.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |53.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |53.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/libllvm14-lib-Target.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |53.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/py3/libpy3python-websocket-client-py3.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/py3/libpy3python-websocket-client-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |53.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |53.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_e9446a953b5a015999d71407a6.yasm |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.a |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/packet.cpp |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm14/libminikql-invoke_builtins-llvm14.a |53.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/unversioned_value.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |53.9%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |53.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |54.1%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 0 bytes |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/complex_types/merge_complex_types.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/phoenix.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Target/X86/liblib-Target-X86.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |53.9%| PREPARE $(CLANG_FORMAT-2313326005) - 0 bytes |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |53.8%| PREPARE $(LLD_ROOT-2644097164) - 0 bytes |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/invoker_queue.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/crash_handler.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |53.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |53.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |52.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp 
|52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |52.7%| PREPARE $(FLAKE8_PY3-1472545107) - 0 bytes |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_buffered_dynamic_table_writer.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |52.5%| PREPARE $(FLAKE8_PY2-2255386470) - 0 bytes |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/origin_attributes.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/new_fair_share_thread_pool.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |52.1%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) - 0 bytes |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error_code.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |51.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/string_helpers.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/stripped_error.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/net/dialer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/config.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |50.5%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/flake8_linter/flake8_linter |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.a |50.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.a 
|50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.a |50.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |49.1%| PREPARE $(TEST_TOOL_HOST-sbr:7480276291) - 0 bytes |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |49.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/libpy3connector-api-common.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/libpy3connector-api-common.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |48.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_b74ebee90bb7903d84da5b42f7.yasm |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |48.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/_18e05a10f6ea49dd0f554fa51f.yasm |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.a |48.7%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpython-symbols-python.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.a |48.8%| PREPARE $(BLACK_LINTER-sbr:6648883615) - 0 bytes |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |48.7%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/black_linter/black_linter |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |49.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/_10c9874010308af47fbf8680a3.yasm |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |48.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |48.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/TextAPI/libllvm14-lib-TextAPI.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |49.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |49.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |49.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/ut/ydb-core-blobstorage-crypto-ut |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_getter.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |49.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/cluster_ordering-ut |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |49.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_builder/yaml_config-validator-ut-validator_builder |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |49.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut_perf/ydb-core-erasure-ut_perf |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |49.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator/ydb-library-yaml_config-validator-ut-validator |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |49.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |49.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/ut/ydb-core-resource_pools-ut |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cansel_build_index.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |49.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |49.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |50.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |50.1%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/cpp_style_checker/cpp_style_checker |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |50.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/abstract/liblibrary-formats-arrow-accessor-abstract.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |50.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/tests/functional/kqp/kqp_query_svc/main.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |50.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/libconnector-api-common.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/client/liblibrary-grpc-client.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |50.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/_58fd777f7f26b92a5c3a65ebdb.yasm |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/_c3c77022f32ea9f03063c598f4.yasm |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/_b74ebee90bb7903d84da5b42f7.yasm |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |50.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |50.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/Utils/liblib-Transforms-Utils.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/mkql_proto/mkql_proto_ut.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/_013701e9e21d7e09e202127262.yasm |50.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/_b74ebee90bb7903d84da5b42f7.yasm |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm14/lib/Transforms/IPO/liblib-Transforms-IPO.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |50.5%| [AS] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dynumber/_48a078239a7d32a31a8d7798bb.yasm |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |50.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |50.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/crypto/liblibs-openssl-crypto.a |50.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |50.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/_020e2413fa05acf6fcc0b6a0a8.yasm |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/libcore-io_formats-arrow.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |50.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/_c0f0d29b6c33e7f05e57d209d8.yasm |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |50.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |50.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_helper.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_log.cpp |50.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/audit_logins.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_publisher_service_actor.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |50.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_mon.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.a |50.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_bsconfig.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_cms.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_backup.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |51.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/_f0624cc5f1734a0e6284cc3450.yasm |51.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/_77628d0748cc5bf266f6e05741.yasm |51.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/_b74ebee90bb7903d84da5b42f7.yasm |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |51.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_61e4b816cf79b7606ca15b5877.yasm |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |51.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_35c2f7a04f289a9f72763c2025.yasm |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |51.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_export.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_view.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/table_settings.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_import.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_login.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |51.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/_b74ebee90bb7903d84da5b42f7.yasm |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_replication.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |51.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |51.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |50.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/security/simple/libmvp-security-simple.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/libclient-yc_private-access.a |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/libmvp-core-protos.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |51.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/actors/ut/yql_arrow_push_down_ut.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/client/nc_private/iam/libclient-nc_private-iam.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_fq.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/libclient-yc_private-quota.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/obfuscate/liblibrary-persqueue-obfuscate.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/rule/libtx-tiering-rule.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/libydb-mvp-core.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |51.1%| PREPARE $(WITH_JDK17-sbr:6941855347) - 0 bytes |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_arrow_memory_pool.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_effects.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |51.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |51.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |51.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |51.5%| PREPARE $(JDK17-4020545899) - 0 bytes |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |51.4%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |51.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |51.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |51.6%| PREPARE $(JDK_DEFAULT-4020545899) - 0 bytes |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/libyc_private-ydb-v1.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |51.6%| PREPARE $(JDK11-1325468316) - 0 bytes |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |51.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |51.6%| PREPARE $(CLANG-1735056821) - 0 bytes |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/replication.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |51.7%| PREPARE $(WITH_JDK-sbr:6941855347) - 0 bytes |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |51.7%| PREPARE $(WITH_JDK11-sbr:6936090488) - 0 bytes |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |51.8%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |51.8%| PREPARE $(CLANG18-390461695) - 0 bytes |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |51.8%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |51.9%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |51.9%| PREPARE $(CLANG14-1922233694) - 0 bytes |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |52.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |51.9%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/msgbus_health.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |51.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |51.9%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |52.0%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |52.1%| PREPARE $(CLANG-1922233694) - 0 bytes |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |52.1%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/tx_datashard.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/counters_hive.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |51.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |51.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.pb.cc |51.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |51.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |51.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |52.3%| 
[CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.pb.cc |52.3%| PREPARE $(GDB) - 0 bytes |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_program_builder.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_table.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_factory.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_meta.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_factory.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor_settings.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.grpc.pb.cc |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |52.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/_ae6accdc802b0e073e8d19156b.yasm |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |52.4%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/wardens/_c36460c1f3f976caa23b5bd087.yasm |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |52.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/_b74ebee90bb7903d84da5b42f7.yasm |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |52.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_group/main.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/common/util_ut.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |52.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_b74ebee90bb7903d84da5b42f7.yasm |52.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_270cc1cc4ad07a20fdc1de7945.yasm |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |52.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/_4a6a74a0ab38f783afd5375054.yasm |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a >> ResourcePoolTest::SettingsExtracting [GOOD] >> ResourcePoolClassifierTest::IntSettingsParsing [GOOD] >> ResourcePoolTest::PercentSettingsParsing [GOOD] >> ResourcePoolClassifierTest::SettingsExtracting [GOOD] >> ResourcePoolTest::IntSettingsParsing [GOOD] >> ResourcePoolTest::SettingsValidation [GOOD] >> ResourcePoolClassifierTest::SettingsValidation [GOOD] >> ResourcePoolTest::SecondsSettingsParsing [GOOD] >> ResourcePoolClassifierTest::StringSettingsParsing [GOOD] |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |52.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/provider/yql_s3_listing_strategy_ut.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a >> Validator::OpaqueMaps [GOOD] >> Validator::MapValidation [GOOD] >> Validator::IntArrayValidation [GOOD] >> Validator::Enums [GOOD] >> Validator::StringValidation [GOOD] >> Validator::BoolValidation [GOOD] >> Validator::MultitypeNodeValidation [GOOD] |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a >> Validator::IntValidation [GOOD] >> TBlobStorageCrypto::TestMixedStreamCypher >> TWeighedOrderingTest::WeighedSelectionTest [GOOD] >> TWeighedOrderingTest::SimpleSelectionTest [GOOD] >> TWeighedOrderingTest::WeighedOrderingTest [GOOD] >> TErasurePerfTest::Split |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |52.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a >> TBlobStorageCrypto::TestMixedStreamCypher [GOOD] >> TBlobStorageCrypto::TestOffsetStreamCypher [GOOD] >> TBlobStorageCrypto::TestInplaceStreamCypher [GOOD] >> TBlobStorageCrypto::PerfTestStreamCypher >> ValidatorBuilder::CreateMultitypeNode [GOOD] >> ValidatorBuilder::CanHaveMultipleType [GOOD] >> ValidatorBuilder::BuildSimpleValidator [GOOD] >> ValidatorBuilder::CanCreateAllTypesOfNodes [GOOD] >> ValidatorBuilder::CanHaveDuplicateType [GOOD] |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/transactions/libdata_sharing-common-transactions.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a >> TBlobStorageCrypto::PerfTestStreamCypher [GOOD] >> TBlobStorageCrypto::UnalignedTestStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher >> TErasurePerfTest::Split [GOOD] >> TErasurePerfTest::Restore |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |52.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] >> test.py::py2_flake8 [GOOD] |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |52.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/streams/xz/libcpp-streams-xz.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |52.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/mremap.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher >> test.py::py2_flake8 [GOOD] |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_factory.cpp >> conftest.py::flake8 [GOOD] |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/isLocalAddress.cpp >> docker_wrapper_test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_decimal.cc |52.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/resource_pools/ut/unittest >> ResourcePoolClassifierTest::StringSettingsParsing [GOOD] |52.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_large/unittest >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |52.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/validator/ut/validator/unittest >> Validator::IntValidation [GOOD] |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/datum.cc |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/io_util.cc |52.6%| [TS] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest >> TWeighedOrderingTest::WeighedOrderingTest [GOOD] >> test.py::py2_flake8 [GOOD] |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |52.6%| [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/validator/ut/validator_builder/unittest >> ValidatorBuilder::CanHaveDuplicateType 
[GOOD] |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a >> __main__.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_base.cc |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_stability.py::flake8 [GOOD] |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp >> http_client.py::flake8 [GOOD] >> query_results.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> tstool.py::flake8 [GOOD] |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a >> __main__.py::flake8 [GOOD] |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/utf8.cc |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.8%| [TS] {asan, 
default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_flow.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/driver/flake8 >> __main__.py::flake8 [GOOD] |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/gateway/file/libyt-gateway-file.a >> test.py::py2_flake8 [GOOD] |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/config/flake8 >> test_config_with_metadata.py::flake8 [GOOD] |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/tools/tstool/flake8 >> tstool.py::flake8 [GOOD] |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/http_api_client/flake8 >> query_results.py::flake8 [GOOD] |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stability/ydb/flake8 >> test_stability.py::flake8 [GOOD] |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... 
grpc.pb.h} |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/ydb_serializable/replay/flake8 >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_liveness_wardens.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_transform.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |52.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestMixedStreamCypher >> tpc_tests.py::flake8 [GOOD] |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> TBlobStorageCryptoRope::TestMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestOffsetStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestInplaceStreamCypher >> test.py::py2_flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a >> test.py::py2_flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... 
grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] >> TBlobStorageCryptoRope::TestInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::PerfTestStreamCypher >> test.py::py2_flake8 [GOOD] >> test_dynumber.py::flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/ydb_serializable/flake8 >> __main__.py::flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} >> TBlobStorageCryptoRope::PerfTestStreamCypher [GOOD] >> TBlobStorageCryptoRope::UnalignedTestStreamCypher [GOOD] >> TChaCha::KeystreamTest1 [GOOD] >> TChaCha::KeystreamTest2 [GOOD] >> TChaCha::KeystreamTest3 [GOOD] >> TChaCha::KeystreamTest4 [GOOD] >> TChaCha::KeystreamTest5 [GOOD] >> TChaCha::KeystreamTest6 [GOOD] >> TChaCha::KeystreamTest7 [GOOD] >> TChaCha::KeystreamTest8 [GOOD] >> TChaCha::MultiEncipherOneDecipher [GOOD] >> TChaCha::SecondBlock [GOOD] >> TChaCha512::KeystreamTest1 [GOOD] >> TChaCha512::KeystreamTest2 [GOOD] >> TChaCha512::KeystreamTest3 [GOOD] >> TChaCha512::KeystreamTest4 [GOOD] >> TChaCha512::KeystreamTest5 [GOOD] >> TChaCha512::KeystreamTest6 [GOOD] >> TChaCha512::KeystreamTest7 [GOOD] >> TChaCha512::KeystreamTest8 [GOOD] >> TChaCha512::MultiEncipherOneDecipher [GOOD] >> TChaCha512::SecondBlock [GOOD] >> TChaCha512::CompatibilityTest >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |52.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut_transform/flake8 >> test_transform.py::flake8 [GOOD] |53.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |52.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... 
grpc.pb.h} |53.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.0%| [TS] {asan, default-linux-x86_64, release} ydb/tools/statistics_workload/flake8 >> __main__.py::flake8 [GOOD] |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |53.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} >> test.py::py2_flake8 [GOOD] >> TChaCha512::CompatibilityTest [GOOD] >> TChaChaVec::KeystreamTest1 [GOOD] >> TChaChaVec::KeystreamTest2 [GOOD] >> TChaChaVec::KeystreamTest3 [GOOD] >> TChaChaVec::KeystreamTest4 [GOOD] >> TChaChaVec::KeystreamTest5 [GOOD] >> TChaChaVec::KeystreamTest6 [GOOD] >> TChaChaVec::KeystreamTest7 [GOOD] >> TChaChaVec::KeystreamTest8 [GOOD] >> TChaChaVec::MultiEncipherOneDecipher [GOOD] >> TChaChaVec::SecondBlock [GOOD] >> TChaChaVec::CompatibilityTest >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |53.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD] |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/ut/ydb-core-fq-libs-hmac-ut |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |53.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |53.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |53.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... 
grpc.pb.h} >> TChaChaVec::CompatibilityTest [GOOD] >> TPoly1305::TestVector1 [GOOD] >> TPoly1305::TestVector2 [GOOD] >> TPoly1305::TestVector3 [GOOD] >> TPoly1305::TestVector4 [GOOD] >> TPoly1305Vec::TestVector1 [GOOD] >> TPoly1305Vec::TestVector2 [GOOD] >> TPoly1305Vec::TestVector3 [GOOD] >> TPoly1305Vec::TestVector4 [GOOD] >> TTest_t1ha::TestZeroInputHashIsNotZero [GOOD] >> TTest_t1ha::PerfTest >> test.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] >> TTest_t1ha::PerfTest [GOOD] >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/dynumber/flake8 >> test_dynumber.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> gen-report.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> compare.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> integrations_test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... 
grpc.pb.h} >> test_insert_restarts.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} >> test.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} >> select_positive.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} >> test.py::flake8 [GOOD] |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] >> run_tests.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> TErasurePerfTest::Restore [GOOD] |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp >> __main__.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> hive_matchers.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> TErasureSmallBlobSizePerfTest::StringErasureMode [GOOD] >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} >> test.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} >> test_dynamic_tenants.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... 
grpc.pb.h} >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |53.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.1%| [TS] {asan, default-linux-x86_64, release} ydb/tools/cfg/bin/flake8 >> __main__.py::flake8 [GOOD] |53.1%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/public/tools/lib/cmds/ut/flake8 >> test.py::flake8 [GOOD] |53.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |53.2%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |53.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... 
grpc.pb.h} |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD] |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/crypto/ut/unittest >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |53.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |53.2%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... defs.inl.h} |53.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/data_source.pb.{h, cc} |53.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |53.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/library/ut/flake8 >> integrations_test.py::flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] 
|53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |53.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD] |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/public/tools/ydb_recipe/flake8 >> __main__.py::flake8 [GOOD] |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/erasure/ut_perf/unittest >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/writer.cc |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |53.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tenants/flake8 >> test_tenants.py::flake8 [GOOD] |53.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |53.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_empty.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |53.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |53.5%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_reduce.cpp |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_condense1.cpp |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_topic_v1.{pb.h ... grpc.pb.h} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} >> HmacSha::HmacSha1 [GOOD] |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |53.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |53.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |53.5%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_ifpresent.cpp |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |53.5%| PREPARE $(CLANG16-1380963495) - 0 bytes |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/common/endpoint.pb.{h, cc} >> DoubleIndexedTests::TestUpsertByBothKeys [GOOD] >> DoubleIndexedTests::TestErase [GOOD] >> DoubleIndexedTests::TestMerge [GOOD] >> DoubleIndexedTests::TestFind [GOOD] >> DoubleIndexedTests::TestUpsertBySingleKey [GOOD] >> test.py::py2_flake8 [GOOD] |53.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |53.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |53.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp >> test.py::py2_flake8 [GOOD] |53.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_filter_ut.cpp |53.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/hmac/ut/unittest >> HmacSha::HmacSha1 [GOOD] |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/json_parser_ut.cpp |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_pickle.cpp |53.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} >> test.py::py2_flake8 [GOOD] >> test_compatibility.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/util_internal.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_multimap.cpp >> test.py::py2_flake8 [GOOD] >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/csf_converter.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider_context.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp >> main.py::flake8 [GOOD] |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |53.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/ut/ydb-core-fq-libs-signer-ut |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_op_hash.cpp >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_leader_start_inflight.py::flake8 [GOOD] |53.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_double_indexed/unittest >> DoubleIndexedTests::TestUpsertBySingleKey [GOOD] |53.6%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |53.6%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |53.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/type.cc |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_coalesce.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |53.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp >> kikimr_config.py::flake8 [GOOD] >> runner.py::flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/LimitTransform.cpp |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectQuery.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRolesOrUsersSet.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp >> common.cpp::clang_format [GOOD] >> common.h::clang_format [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/terminate_policy/libudf-service-terminate_policy.global.a |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/clickhouse_client_udf.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSampleRatio.cpp >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_io_discovery.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDatabaseOrNone.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |53.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/compatibility/flake8 >> test_compatibility.py::flake8 [GOOD] |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |53.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCase.cpp >> test_sql.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/InsertQuerySettingsPushDownVisitor.cpp |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/apps/dstool/flake8 >> main.py::flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSystemQuery.cpp |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowTablesQuery.cpp >> test_encryption.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_skip.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetQuery.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSettingsProfileElement.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/public/tools/local_ydb/flake8 >> __main__.py::flake8 [GOOD] |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_way.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectQuery.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_dictionary.cc |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithTableAndOutput.cpp |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOnCluster.cpp |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQualifiedAsterisk.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOrderByElement.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/table_builder.cc >> Signer::Basic [GOOD] |53.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTLiteral.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserInsertQuery.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIndexDeclaration.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_visitall.cpp |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/common/clang_format >> common.h::clang_format [GOOD] |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunctionWithKeyValueArguments.cpp |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDropQuery.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_nop.cpp |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsMatcher.cpp |53.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionary.cpp |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/tools/simple_json_diff/flake8 >> __main__.py::flake8 [GOOD] |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.a |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTBackupQuery.cpp >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_squeeze_state.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAsterisk.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTAlterQuery.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_time_order_recover.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/InternalTextLogsQueue.cpp >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/readFloatText.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryLog.cpp |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteHelpers.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/basic_decimal.cc |53.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_queue.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromPocoSocket.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/QueryThreadLog.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFile.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/TimeoutSetter.cpp |53.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_util.cc |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ThreadPoolReader.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_next_value.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadSettings.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/SynchronousReader.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/PeekableReadBuffer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_guess.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/toFixedString.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_match_recognize.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_condense.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFileDescriptor.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chain_map.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferBase.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/parseAddress.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_filter.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/setThreadName.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionFactory.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBufferFromFile.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_div.cpp >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |54.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/signer/ut/unittest >> Signer::Basic [GOOD] |54.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... 
grpc.pb.h} |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/IBlockInputStream.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/memory.cc |54.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chain1_map.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimalBase.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationEnum.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/util.cc |54.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime.cpp |54.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ColumnGathererStream.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomIPv4AndIPv6.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |54.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDecimal.cpp |54.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime64.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFunction.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationArray.cpp |54.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |54.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeString.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNullable.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/ISerialization.cpp |54.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_some.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/generated/parquet_constants.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesNumber.cpp 
|54.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypesDecimal.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/IDataType.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeTuple.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/EnumValues.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationUUID.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/printer.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinalityHelpers.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/materializeBlock.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeEnum.cpp |54.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/randomSeed.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/logging.cc |54.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProfileEvents.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/encryption.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_zstd.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/feather.fbs.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/registerDataTypeDateTime.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/pretty_print.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadStatus.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/FilterDescription.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadProfileEvents.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationWrapper.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/slow.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap_ops.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_brotli.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/coo_converter.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDateTime.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/mutex.cc |54.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... 
grpc.pb.h} |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDateTime64.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTConstraintDeclaration.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/reader_internal.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/uri.cc |54.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Block.cpp |54.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/MaskOperations.cpp |54.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Settings.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BlockInfo.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IntervalKind.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnTuple.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatReadable.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/utils/actors/http_sender_actor_ut.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/statistics.cc |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFixedString.cpp |54.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Exception.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/stream_reader.cc |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/memory.cc |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Epoll.cpp |54.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... 
grpc.pb.h} |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/level_conversion.cc |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationMap.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_map.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecNone.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorToString.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecMultiple.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnDecimal.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/options.cc |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/JSONEachRowUtils.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/transform.cc |54.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_exists.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/writer.cc >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |54.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_withcontext.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/properties.cc |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/DNSResolver.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/schema.cc |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDatabaseOrNone.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFile.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/MemoryTracker.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionaryAttributeDeclaration.cpp |54.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |54.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... 
grpc.pb.h} |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getNumberOfPhysicalCPUCores.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileDescriptor.cpp >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFileDescriptor.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/result.cc |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/castColumn.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/Progress.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/types.cc |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileDescriptor.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMappedFile.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromPocoSocket.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/copyData.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_zlib.cc |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/MMapReadBufferFromFileWithCache.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISimpleTransform.cpp |54.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionDeclaration.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTTLElement.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomGeo.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithElement.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTUserNameWithHost.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWithAlias.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDictionary.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_reader.cc |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_base.cc |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_nested.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDescribeTableQuery.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_if.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_string.cc |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionCombinatorFactory.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_writer.cc |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExternalDDLQuery.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserAlterQuery.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_hash.cc |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSelectWithUnionQuery.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserKillQueryQuery.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fromyson.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/chunked_array.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowInputFormat.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWatchQuery.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_if_else.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserWithElement.cpp |54.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_union.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_heap.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Chunk.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUserNameWithHost.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_iterable.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnMap.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionsConversion.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBufferedStreams.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ConcatProcessor.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockOutputFormat.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CHColumnToArrowColumn.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ORCBlockInputFormat.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowGrantsQuery.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTInsertQuery.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSettingsProfileElement.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetQuery.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/data.cc |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_factory.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_key.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_provider_impl.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_check_args.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_table_desc.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_coalesce.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fold1.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsFields.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_skiptake.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_group.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/IColumn.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_mod.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_validity.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_zip.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_decimal.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_chopper.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_extend.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_compress.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_invoke.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/util.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_adaptive.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_integration.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUnionQueryElement.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_decimal_mul.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_toindexdict.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionElementParsers.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/AlignedBuffer.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/extractTimeZoneFromFunctionArguments.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/dictionary.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ExpressionListParsers.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/encryption_internal_nossl.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_while.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowPrivilegesQuery.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_dict.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/QueryWithOutputSettingsPushDownVisitor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/AvroRowInputFormat.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseQuery.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/cast.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_sort.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IRowOutputFormat.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/validate.cc |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/CastOverloadResolver.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/run_ydb.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-transfer-topic-to-table.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/ydb-dump.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tstool/tstool |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs_fixture.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.a |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/_589c15a03e749fa7fc88e46a30.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs.cpp |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/_d3a4528808d8425466dfec4185.yasm |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-topic.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_b74ebee90bb7903d84da5b42f7.yasm |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_0edefb735db84420d76f6da5ad.yasm |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.a |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/_3d91683202a822f8cc1b66c627.yasm |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowBlockInputFormat.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/diff.cc |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/dstool/ydb-dstool |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/bin/main.cpp |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/_b74ebee90bb7903d84da5b42f7.yasm |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/_ee1c849f6822d0ae9877348f75.yasm |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/common/ut/retry_events_queue_ut.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/_ba6443d0375bfb6f8ec8a6f4e9.yasm |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_e8e379b61234dd7ed260efcc27.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_b74ebee90bb7903d84da5b42f7.yasm |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/_e4737a1de2b548fed21b6733c7.yasm |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ParquetBlockInputFormat.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/c/bridge.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/concatenate.cc >> MetaCache::BasicForwarding [GOOD] >> MetaCache::TimeoutFallback [GOOD] |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_nested.cc |54.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/RawBLOBRowInputFormat.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowOutputFormat.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_scalar.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_ut.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.a |54.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_crypto_ut.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache_ut.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/_833dd80e79c977aa58b8ac97ec.yasm |54.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/_b74ebee90bb7903d84da5b42f7.yasm |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/_7b240b071767564ebe8b43187b.yasm
------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/meta/ut/unittest >> MetaCache::TimeoutFallback [GOOD]
Test command err:
2024-11-21T23:01:20.225916Z :HTTP INFO: Listening on http://[::]:13448
2024-11-21T23:01:20.226380Z :HTTP INFO: Listening on http://[::]:61794
2024-11-21T23:01:20.226792Z :HTTP DEBUG: resolving 127.0.0.1:13448
2024-11-21T23:01:20.226873Z :HTTP DEBUG: connecting
2024-11-21T23:01:20.227100Z :HTTP DEBUG: (#17,127.0.0.1:13448) outgoing connection opened
2024-11-21T23:01:20.227159Z :HTTP DEBUG: (#17,127.0.0.1:13448) <- (GET /server)
2024-11-21T23:01:20.227330Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:43284) incoming connection opened
2024-11-21T23:01:20.227446Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:43284) -> (GET /server)
2024-11-21T23:01:20.227538Z :HTTP DEBUG: Updating ownership http://127.0.0.1:61794 with deadline 2024-11-21T23:02:20.227516Z
2024-11-21T23:01:20.227570Z :HTTP DEBUG: SetRefreshTime "/server" to 2024-11-21T23:02:20.227516Z (+1732230140.227516s)
2024-11-21T23:01:20.227631Z :HTTP DEBUG: IncomingForward /server to http://127.0.0.1:61794 timeout 30.000000s
2024-11-21T23:01:20.227724Z :HTTP DEBUG: resolving 127.0.0.1:61794
2024-11-21T23:01:20.227761Z :HTTP DEBUG: connecting
2024-11-21T23:01:20.227854Z :HTTP DEBUG: (#19,127.0.0.1:61794) outgoing connection opened
2024-11-21T23:01:20.227885Z :HTTP DEBUG: (#19,127.0.0.1:61794) <- (GET /server)
2024-11-21T23:01:20.228018Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:56664) incoming connection opened
2024-11-21T23:01:20.228092Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:56664) -> (GET /server)
2024-11-21T23:01:20.228260Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:56664) <- (200 Found)
2024-11-21T23:01:20.228334Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:56664) connection closed
2024-11-21T23:01:20.228462Z :HTTP DEBUG: (#19,127.0.0.1:61794) -> (200 Found)
2024-11-21T23:01:20.228523Z :HTTP DEBUG: (#19,127.0.0.1:61794) connection closed
2024-11-21T23:01:20.228702Z :HTTP DEBUG: Cache received successful (200) response for /server
2024-11-21T23:01:20.228780Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:43284) <- (200 Found)
2024-11-21T23:01:20.228826Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:43284) connection closed
2024-11-21T23:01:20.229000Z :HTTP DEBUG: (#17,127.0.0.1:13448) -> (200 Found)
2024-11-21T23:01:20.229023Z :HTTP DEBUG: (#17,127.0.0.1:13448) connection closed
2024-11-21T23:01:20.233756Z :HTTP INFO: Listening on http://[::]:62013
2024-11-21T23:01:20.234079Z :HTTP INFO: Listening on http://[::]:4269
2024-11-21T23:01:20.234369Z :HTTP DEBUG: resolving 127.0.0.1:62013
2024-11-21T23:01:20.234443Z :HTTP DEBUG: connecting
2024-11-21T23:01:20.234557Z :HTTP DEBUG: (#17,127.0.0.1:62013) outgoing connection opened
2024-11-21T23:01:20.234588Z :HTTP DEBUG: (#17,127.0.0.1:62013) <- (GET /server)
2024-11-21T23:01:20.234724Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:40982) incoming connection opened
2024-11-21T23:01:20.234772Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:40982) -> (GET /server)
2024-11-21T23:01:20.234884Z :HTTP DEBUG: Updating ownership http://127.0.0.1:4269 with deadline 2024-11-21T23:11:20.234858Z
2024-11-21T23:01:20.234918Z :HTTP DEBUG: SetRefreshTime "/server" to 2024-11-21T23:11:20.234858Z (+1732230680.234858s)
2024-11-21T23:01:20.234949Z :HTTP DEBUG: IncomingForward /server to http://127.0.0.1:4269 timeout 30.000000s
2024-11-21T23:01:20.235036Z :HTTP DEBUG: resolving 127.0.0.1:4269
2024-11-21T23:01:20.235070Z :HTTP DEBUG: connecting
2024-11-21T23:01:20.235182Z :HTTP DEBUG: (#19,127.0.0.1:4269) outgoing connection opened
2024-11-21T23:01:20.235205Z :HTTP DEBUG: (#19,127.0.0.1:4269) <- (GET /server)
2024-11-21T23:01:20.235268Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:48414) incoming connection opened
2024-11-21T23:01:20.235365Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:48414) -> (GET /server)
2024-11-21T23:01:20.245606Z :HTTP ERROR: (#19,127.0.0.1:4269) connection closed with error: Connection timed out
2024-11-21T23:01:20.245746Z :HTTP DEBUG: (#20,[::ffff:127.0.0.1]:48414) connection closed
2024-11-21T23:01:20.245911Z :HTTP WARN: Cache received failed response with error "Connection timed out" for /server - retrying locally
2024-11-21T23:01:20.256157Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:40982) <- (200 Found)
2024-11-21T23:01:20.256224Z :HTTP DEBUG: (#18,[::ffff:127.0.0.1]:40982) connection closed
2024-11-21T23:01:20.256346Z :HTTP DEBUG: (#17,127.0.0.1:62013) -> (200 Found)
2024-11-21T23:01:20.256394Z :HTTP DEBUG: (#17,127.0.0.1:62013) connection closed
|54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_0c234afaa407a4418f9cfff531.yasm |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_51b73721929f13078ecfb118b8.yasm |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_b74ebee90bb7903d84da5b42f7.yasm |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_4795fb4850b9d88b2c7b5e8ec2.yasm |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_e0190ea6b9626b7936bb01e6fa.yasm |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_b74ebee90bb7903d84da5b42f7.yasm
|54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/_ed7a171e0f8176da271b70841d.yasm |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/_b74ebee90bb7903d84da5b42f7.yasm |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/_b353e86138e866a44fba7693ec.yasm |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_129761279a8f635b5cb25be6f6.yasm |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_3a6f4eda1ec5d2bd4b5d7ab909.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_c00748ee9f4a8df0a497895714.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/_9a3d5b70802b945274f285f587.yasm |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |54.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_b74ebee90bb7903d84da5b42f7.yasm |54.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/_86d3e302364c3382a2b168ce57.yasm |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/_a0ac6bff4d1f5e5b56eb56eb04.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.a |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/_b74ebee90bb7903d84da5b42f7.yasm |54.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/_36a9c2c404ae886b8a0915297e.yasm |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_45bf9e1d124d3a4ab8f9f012d8.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_6b2f2f7191f2fb9fffba30b043.yasm |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/_b74ebee90bb7903d84da5b42f7.yasm |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/_26182f71da26956759f0d6a4bc.yasm |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_context.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/codecs_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/bin/main.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserQuery.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_random.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnNullable.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTTablesInSelectQuery.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/function_internal.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IAccumulatingTransform.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSetRoleQuery.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_boolean.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_map.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/createReadBufferFromFileBase.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_encode.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/_b74ebee90bb7903d84da5b42f7.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/_5d85197d778faf9a7a67ead8d8.yasm |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/_c48f2a4e960700ea8a9826632f.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/builder.cc |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/chunker.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_tdigest.cc |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_b74ebee90bb7903d84da5b42f7.yasm |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_5c7efee738caa61f00e33b41e9.yasm |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |54.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/_55a8ee17216c8627b2de1b874d.yasm |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.a 
|54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/_b74ebee90bb7903d84da5b42f7.yasm |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_string.cc |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/_77ff0e3be10902817b4214e3df.yasm |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/_ba4cba5dcefc679d9e6b854354.yasm |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/RowInputFormatWithDiagnosticInfo.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_compare.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_nested.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/config.cc |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |54.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_grace_join.cpp |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/scalar.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/provider/ut/pushdown/pushdown_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_if.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeLowCardinality.cpp |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_logical.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_aggrcount.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/bin/mvp_meta |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_guardian_impl_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/path_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/table_index_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/simple_queue/simple_queue |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/memory_stats_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob_ut.cpp |54.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/simple_queue/_ebe9a4253f4efe5c089a249dbb.yasm |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/simple_queue/libpy3simple_queue.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/simple_queue/libpy3simple_queue.global.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_addmember.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_map_join.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/column_decoder.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_top.cpp |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_binary.cc |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_exists.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg.cpp |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |54.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/ydb-core-base-ut |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/schema_internal.cc |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/adapters/orc/adapter.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_base.cc |54.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_nested.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_snappy.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_lz4.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_dict.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_union.cc |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_binary.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_decimal.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_decimal.cc |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_binary.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/adapters/orc/adapter_util.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_adaptive.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_base.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_primitive.cc |54.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_c08ba1db5492c87ddfd8611d8f.yasm |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_dict.cc |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/functional/backup/s3_path_style/s3_path_style_backup_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/options.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_tdigest.cc |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a 
|54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/reader.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_scalar.cc |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/concatenate.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_nested.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/builder.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/diff.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/buffer.cc |54.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/data.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/validate.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/util.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_9320981177f1bb46a5cf7bb627.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/chunked_array.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_aggregate.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |54.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/_b74ebee90bb7903d84da5b42f7.yasm |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/c/bridge.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_map.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compare.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_hash.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_quantile.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/cast.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/exec_plan.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_vector.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/expression.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_encode.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_compare.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernel.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/util.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/function_internal.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_mode.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/function.cc |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_nested.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_basic.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/codegen_internal.cc |54.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_internal.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/converter.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_dictionary.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_var_std.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_boolean.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_boolean.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_nested.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_temporal.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_fill_null.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/column_decoder.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/hash_aggregate.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_string.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/util_internal.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_if_else.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_validity.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_numeric.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_set_lookup.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_nested.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_replace.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/column_builder.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_selection.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/config.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/registry.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_compare.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/chunker.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_temporal.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_brotli.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/chunker.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_hash.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/device.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/memory.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/parser.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/datum.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/mockfs.cc |54.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/writer.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/localfs.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_delayedresp_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/extension_type.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/caching.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_arithmetic.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/util_internal.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/interfaces.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/path_util.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/filesystem.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/buffered.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/compressed.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/stdio.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/dictionary.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/chunked_builder.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/slow.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/file.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/transform.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/metadata_internal.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/options.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/feather.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/message.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_sort.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/cancel.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/json_simple.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/writer.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/options.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/object_parser.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/reader.cc |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/object_writer.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/memory_pool.cc 
|54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/pretty_print.cc |54.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/_5689282d9693ccac57318ca874.yasm |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/converter.cc |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/_b74ebee90bb7903d84da5b42f7.yasm |54.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/_3d2ad5aac03dd48ea1a0a83eb8.yasm |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/parser.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/table_builder.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/reader.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/record_batch.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/result.cc |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/status.cc |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/sparse_tensor.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |54.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/_79b13353271c8cfe46ea4b9f1e.yasm |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/table.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap_builders.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/basic_decimal.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_util.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_run_reader.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/csf_converter.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_block_counter.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/coo_converter.cc |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.global.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/scalar.cc |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/_1144738eb9e014641c1ecd8edb.yasm |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/type.cc |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bpacking.cc |55.0%| 
[CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap_ops.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/stream_writer.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Tensor.fbs.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/internal_file_decryptor.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/types.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/task_group.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_zstd.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/decimal.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_zlib.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/formatting.cc |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/cpu_info.cc |55.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/_6f43a28f839ac89d335c7ab9b1.yasm |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/int_util.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/future.cc |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/delimiting.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/memory.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/string_builder.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/logging.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/mutex.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/string.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/SparseTensor.fbs.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/key_value_metadata.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/io_util.cc |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/base64.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/trie.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/thread_pool.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Schema.fbs.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/utf8.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/time.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/tdigest.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/value_parsing.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/File.fbs.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/musl/strptime.c |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/uri.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/visitor.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Message.fbs.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/datetime/tz.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/encryption_internal_nossl.cc |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/generated/parquet_constants.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/feather.fbs.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/writer.cc |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/schema_internal.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/schema.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/path_internal.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/bloom_filter.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_scanner.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/encryption.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/reader_internal.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/reader.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/murmur3.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/stream_reader.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/generated/parquet_types.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_writer.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_reader.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encoding.cc |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/file_writer.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |55.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_2549b9c50b780e2386d838ff17.yasm |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |55.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/_44cfa0611a8f90f9f92d8e831f.yasm |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |55.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/_b74ebee90bb7903d84da5b42f7.yasm |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_b74ebee90bb7903d84da5b42f7.yasm |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/_0c6a7b86ca2476db99d999e3e2.yasm |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |55.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/_03a7e7319c52b37778aca2325e.yasm |55.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/join_2e65a3bc8d7db29fed5b5bb7ff.yasm |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/exception.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/internal_file_encryptor.cc |55.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/file_reader.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/local_ydb/local_ydb |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/level_conversion.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/metadata.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/level_comparison.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/platform.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/printer.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/schema.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/properties.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/join_b7c10b4864a820ed988f274a3b.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/statistics.cc |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/_c17932a1c7065b959cf7db2c7d.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/_b74ebee90bb7903d84da5b42f7.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_primitive.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/csx_converter.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/time.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |55.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/key_value_metadata.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/sparse_tensor.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bpacking.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IOutputFormat.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_chopper.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/task_group.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/delimiting.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TSKVRowInputFormat.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/musl/strptime.c |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor/csx_converter.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/object_writer.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_map.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/OpenSSLHelpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/converter.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_selection.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_string.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_optimize.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_hopping.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/protocol.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/feather.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_logical.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_dict.cc |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/json_simple.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Tensor.fbs.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/datetime/tz.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/int_util.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_minmax.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |55.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/localfs.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/vector_replace.cc |55.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/murmur3.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/internal_file_encryptor.cc |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |55.3%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/string.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/status.cc |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/thread_pool.cc |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_b74ebee90bb7903d84da5b42f7.yasm |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_dc6742774f7f7be07b72a0f255.yasm |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_join.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_4ffb80773cd819c6f64ae3337b.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_c51b3218d1d85449db60fbe731.yasm |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_b74ebee90bb7903d84da5b42f7.yasm |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_6f74072898e36b4312ab75a0db.yasm |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_f00d69da9467a4a52da9b22496.yasm |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_d4428c6555fc34a79b567ae531.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_8e0314ef7ed855a3126c9e5eb6.yasm |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/_b74ebee90bb7903d84da5b42f7.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_a0eea144e748338d07d6e2c675.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_todict.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/_c7dcee7daed3ea80f68bb6b1c8.yasm |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/_4e6881630bb7d87e9ab9f3d91f.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_b74ebee90bb7903d84da5b42f7.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_dc40bacd6a1983f1de3e155468.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/_b8f2779e20208045d5f4aadd3f.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tooptional.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/Lexer.cpp 
|55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |55.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |55.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/path_internal.cc |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/tdigest.cc |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_grace_join_imp.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_run_reader.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tobytes.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_hash.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_tostring.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_null.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_rh_hash.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_lookup.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/async_io/ut/dq_solomon_write_actor_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/async_io/ut/ut_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/device.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getThreadId.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_multihopping.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_collect.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_varitem.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |55.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTupleElement.cpp |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/mdb_endpoint_generator_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_9725501498f74c7e358c80ca6f.yasm |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_323be4a89ad1864399ea311db4.yasm |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_da0981963f86194066f883caf8.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/s3/join_9049d840b669fdc5bfce521dda.yasm |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_754a90f95994461130feaa1756.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_b74ebee90bb7903d84da5b42f7.yasm |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/_b74ebee90bb7903d84da5b42f7.yasm |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/_61e39f83bb1c7466cec418c177.yasm |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/_2549b9c50b780e2386d838ff17.yasm |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/_df57a028ba7de3d582f12edff5.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |55.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/_b74ebee90bb7903d84da5b42f7.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/top_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_pg/flat_database_pg_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ResizeProcessor.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSelectWithUnionQuery.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserOptimizeQuery.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/filesystem.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/key_compare.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCheckQuery.cpp |55.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNumber.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSubquery.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_snappy.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryWithOutput.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTPartition.cpp |55.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/options.cc |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTKillQueryQuery.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_listfromrange.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_aggregate.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTFunction.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTCreateQuery.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_udf.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_squeeze_to_list.cpp |55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnDeclaration.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/TablesStatus.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/operation_id/protos/operation_id.pb.{h, cc} |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ClientInfo.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_reverse.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptorDiscardOnFailure.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_now.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/AsynchronousReadBufferFromFile.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFile.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromFileBase.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fromstring.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/writer.cc |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationFixedString.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_dictitems.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/providers/yt/provider/yql_yt_epoch.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |55.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_4d6f3620ae7a47b656a8b1df88.yasm |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_d0bdb20fb3701cab7b1e468fa5.yasm |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/_b74ebee90bb7903d84da5b42f7.yasm |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber >> TPGTest::TestLogin [GOOD] |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ut_utils/libydb_persqueue_public-ut-ut_utils.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ut_utils/libydb_topic-ut-ut_utils.a 
|55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/trace_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/bsconfig/bsconfig_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/describe_topic_ut.cpp
------- [TS] {asan, default-linux-x86_64, release} ydb/core/pgproxy/ut/unittest >> TPGTest::TestLogin [GOOD]
Test command err:
2024-11-21T23:01:30.500939Z :PGWIRE INFO: Listening on [::]:6949
2024-11-21T23:01:30.502094Z :PGWIRE DEBUG: (#10,[::1]:40478) incoming connection opened
2024-11-21T23:01:30.502199Z :PGWIRE DEBUG: (#10,[::1]:40478) -> [1] 'i' "Initial" Size(15) protocol(0x00000300) user=user
2024-11-21T23:01:30.502326Z :PGWIRE DEBUG: (#10,[::1]:40478) <- [1] 'R' "Auth" Size(4) OK
|55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/topic_to_table_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullstorageratio_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/local_partition_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob_ut.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_monitoring/libcpp-client-ydb_monitoring.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_topic/ut/basic_usage_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/ColumnWithTypeAndName.cpp |55.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/BlockStreamProfileInfo.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeAggregateFunction.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/SizeLimits.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate32.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockInputStream.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |55.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeFactory.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationAggregateFunction.cpp |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |55.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/ut/ydb-core-config-ut |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNumberBase.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/table.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_table.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/RemoteHostFilter.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression.cc |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_boolean.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ZooKeeper/IKeeper.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |55.5%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedWriteBuffer.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/verbosePrintString.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ErrorCodes.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/AggregateFunctionFactory.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> TMemoryPoolTest::Transactions [GOOD] >> UtilString::ShrinkToFit [GOOD] >> TMemoryPoolTest::AppendString [GOOD] >> TMemoryPoolTest::TransactionsWithAlignment [GOOD] >> TMemoryPoolTest::LongRollback [GOOD] >> TMemoryPoolTest::AllocOneByte [GOOD] |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionHelpers.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_decimal.cc |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentThread.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Schema.fbs.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/registry.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/OpenedFile.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnsCommon.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/tensor.cc |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMemoryTracker.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTExpressionList.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/Field.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/NativeBlockOutputStream.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufWriter.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_element.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_ensure.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |55.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_util/unittest >> TMemoryPoolTest::AllocOneByte [GOOD] |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/level_comparison.cc |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/ProtobufReader.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/FormatFactory.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationLowCardinality.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hasLinuxCapability.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/mockfs.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserUseQuery.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp >> ConfigProto::ForbidNewRequired |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/registerFormats.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferValidUTF8.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/metadata_internal.cc |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUT.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/file.cc |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a >> ConfigProto::ForbidNewRequired [GOOD] |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/preciseExp10.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/record_batch.cc |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablesInSelectQuery.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/StringRef.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IAST.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_frombytes.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Config/AbstractConfigurationComparison.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserExplainQuery.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeMap.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnCompressed.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/getMultipleKeysFromConfig.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnString.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/JSON.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserBackupQuery.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTWindowDefinition.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationString.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserTablePropertiesQuery.cpp |55.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseUserName.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/chunked_builder.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/DateLUTImpl.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/io_formats/arrow/csv_arrow_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSystemQuery.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/IInputFormat.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowOutputFormat.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONEachRowRowInputFormat.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/column_builder.cc |55.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnLowCardinality.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/errnoToString.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseDatabaseAndTableName.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/util_internal.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_range.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_size.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_peephole.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserSetRoleQuery.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_safe_circular_buffer.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_chain_map.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_discard.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_blocks.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/reader.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_flatmap.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFunction.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/generated/parquet_types.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |55.6%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowOutputFormat.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/AggregateFunctions/IAggregateFunction.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/adapters/orc/adapter_util.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/OutputStreamToOutputFormat.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatSettingName.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserShowTablesQuery.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/TokenIterator.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/filesystem/path_util.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/CSVRowInputFormat.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/queryToString.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/adapters/orc/adapter.cc |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.a |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/_4b59ef27c4220e585fc22b07d4.yasm |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp 
|55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/cfg/bin/ydb_configure |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/extension_type.cc |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Interpreters/ProfileEventsExt.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_count.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/reader.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Port.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_15bedb40aa24416f66f53388b9.yasm |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_b74ebee90bb7903d84da5b42f7.yasm |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/_475563fb51fb0e7131a897a5c7.yasm |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorWriteBinary.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/compressed.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_mapnext.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_wide_flow.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |55.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/caching.cc |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |55.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/buffered.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_set_lookup.cc |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/compression_lz4.cc |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |55.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |55.3%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/_f876c7e5551ebce27aee411303.yasm |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_take.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/string_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |55.5%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/stripped_error.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp >> ConfigValidation::StaticGroupSizesGrow [GOOD] >> ConfigValidation::SameStaticGroup [GOOD] >> ConfigValidation::VDiskChanged [GOOD] >> ConfigValidation::StaticGroupSizesShrink [GOOD] >> ConfigValidation::TooManyVDiskChanged [GOOD] |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |55.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/ut/unittest >> ConfigValidation::TooManyVDiskChanged [GOOD] |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |55.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/config.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |55.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_22e0b4e28e344fbe4b14fc4e7f.yasm |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/async_barrier.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/new_fair_share_thread_pool.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/phoenix.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |55.7%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/crash_handler.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/linear_probe.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/origin_attributes.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error_code.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/ytalloc/bindings.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_3094db96f925466f57c2e99df3.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/_b74ebee90bb7903d84da5b42f7.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp 
|56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bit_block_counter.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/UseSSL.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/thread_local_rng.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_getelem.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDataType.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_enumerate.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/formatting.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/trie.cc |56.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_var_std.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/vendored/base64.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_timezone.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/decimal.cc |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |56.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernel.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/file_writer.cc |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_seq.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/file_reader.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/column_scanner.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getPageSize.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeCustomSimpleAggregateFunction.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... 
grpc.pb.h} |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_map_join.cpp |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/visitor.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/metadata.cc |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_join_dict.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_sort.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_internal.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/stream_writer.cc |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/shift10.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_arithmetic.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/array_primitive.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTShowGrantsQuery.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getResource.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnFixedString.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserRenameQuery.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/value_parsing.cc |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISource.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_primitive.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_iterator.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |56.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/demangle.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/_60cfd0c71f99697efa7d884ea6.yasm |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/join_bf2c9ed2f082df133ad2524c35.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_checks/yaml_config-validator-ut-validator_checks |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_b74ebee90bb7903d84da5b42f7.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/_b74ebee90bb7903d84da5b42f7.yasm |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/snap_vec_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_51e39740d1d758840624baee66.yasm |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/_43bac175e95a3fec996063d2b4.yasm |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_prepend.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/stdio.cc |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/exec_plan.cc |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_block_input_filter.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |55.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_nested.cc |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/client/bin/sqs |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Throttler.cpp |56.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/createHardLink.cpp |56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_b74ebee90bb7903d84da5b42f7.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/yqlrun/gateway_spec.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/main.cpp |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_55acb3440d202d5436c3eebe8d.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/client/bin/main.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/client/cpp/libymq-client-cpp.a |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_b74ebee90bb7903d84da5b42f7.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt >> Checks::MapValidation [GOOD] >> Checks::ErrorInCheck [GOOD] >> Checks::BasicStringChecks [GOOD] >> Checks::BasicIntChecks [GOOD] >> Checks::OpaqueMaps [GOOD] >> Checks::IntArrayValidation [GOOD] |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/_6b2d83fc4b34dc0640579a5038.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/yqlrun/http/libtools-yqlrun-http.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_5bb9b4714ab16ef374043b6486.yasm |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/_3989ea13006d67e89dd1a8ad12.yasm |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_query.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/commands.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_drop.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare_scheme.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_temporal.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnConst.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/yqlrun/yqlrun.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/io/interfaces.cc 
|56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/DoubleConverter.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ClickHouseRevision.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_mode.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/message.cc |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_removemember.cpp |56.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/validator/ut/validator_checks/unittest >> Checks::IntArrayValidation [GOOD] |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encryption/internal_file_decryptor.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/chunker.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap_builders.cc |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/_b74ebee90bb7903d84da5b42f7.yasm >> TBatchedVecTest::TestToStringInt [GOOD] >> BufferWithGaps::Basic [GOOD] >> BufferWithGaps::IsReadable [GOOD] >> PtrTest::Test1 [GOOD] >> TBatchedVecTest::TestOutputTOutputType [GOOD] |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/_e93b94c36ea8b5ce684eea2c49.yasm |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/join_487d6f374d6d03f9641be7dbbc.yasm |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTQueryParameter.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_weakmember.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTRolesOrUsersSet.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_round.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/platform.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/NamesAndTypes.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTColumnsTransformers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/CompressionMethod.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... 
grpc.pb.h} |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnAggregateFunction.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressedReadBuffer.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationDate32.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDate.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/CommonParsers.cpp |56.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadHelpers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |56.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/base/ut/gtest >> TBatchedVecTest::TestOutputTOutputType [GOOD] |56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug_tools/ut/ydb-core-debug_tools-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Executors/PollingQueue.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Formats/NativeFormat.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/encoding.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserCreateQuery.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/escapeForFileName.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTSampleRatio.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_nested.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/CurrentMetrics.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/File.fbs.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnArray.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/converter.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/getFQDNOrHostName.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ThreadPool.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_intent_determination.cpp 
|56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_combine.cpp |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_a1a4fef3e58eac5c8cd56e360e.yasm |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_b74ebee90bb7903d84da5b42f7.yasm |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_ff94c99b3d9492ea47f26af81f.yasm |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_a85123d3cf465cba982424dc08.yasm |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/_bf91d2c3152cd9f79aee642443.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_condense.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/mock/libcommon-http_gateway-mock.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/quoteString.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNothing.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeInterval.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/exec/expression.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/array/builder_binary.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_filter.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/object_parser.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationCustomSimpleText.cpp >> OperationLog::Size1000 >> OperationLog::Size1 [GOOD] >> OperationLog::Size8 [GOOD] >> OperationLog::Size29 [GOOD] |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowInputFormat.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_fill_null.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_replicate.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/tpch/tpch |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/metadata/ut/functions_metadata_ut.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_b74ebee90bb7903d84da5b42f7.yasm |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_14453aaeaf36a596bef15bc685.yasm |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_agg_sum.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/libcpp-client-ydb_federated_topic.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |56.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/basic_usage_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/_48a0e969cc306fdb22d55c035b.yasm |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp >> OperationLog::Size1000 [GOOD] >> OperationLog::ConcurrentWrites |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/SparseTensor.fbs.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/hash_aggregate.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_just.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_append.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_length.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/impl/libclient-ydb_federated_topic-impl.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_column_filter.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_hash.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... 
grpc.pb.h} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp >> OperationLog::ConcurrentWrites [GOOD] |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/cpu_info.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/bloom_filter.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_arrow.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_basic.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_gateway.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_program_step.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_callable.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/codegen_internal.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/bitmap.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/string_builder.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_join_reorder.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |56.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/debug_tools/ut/unittest >> OperationLog::ConcurrentWrites [GOOD] |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/function.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/ipc/reader.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/ReadBufferFromMemory.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserProjectionSelectQuery.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_unwrap.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/checkStackSize.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/future.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/ICompressionCodec.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_lazy_list.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_combine.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIdentifierOrStringLiteral.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/ArrowColumnToCHColumn.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_hasitems.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Columns/ColumnVector.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_apply.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_switch.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/functional/backup/backup_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_b74ebee90bb7903d84da5b42f7.yasm |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_f2c85040cc1290644ebb21b197.yasm |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/_8dffc5726b9be6abfc5e0f9557.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_container.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/ISink.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeNested.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |56.2%| [AS] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/http_proxy/ut/_c43757827e03b03f81c937ad5a.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/parser.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_b74ebee90bb7903d84da5b42f7.yasm |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_583eccaec03903a04e0516e9bb.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/_109168012f4665542dd2bafba9.yasm |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/ut_pg/scheme_tablecell_pg_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationTuple.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_fold.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_id_dict_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeUUID.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_contains.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTProjectionSelectQuery.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/hex.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/base/common/sleep.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeArray.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/IProcessor.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/FunctionFactory.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/formatIPv6.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/BaseSettings.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/NestedUtils.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataStreams/ExecutionSpeedLimits.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/buffer.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/generated/Message.fbs.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/exception.cc |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/public/ut/unittest |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_a38dc35da146e8497390eb9070.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_2df5d6ce0e7b4d20016c681571.yasm |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/_b74ebee90bb7903d84da5b42f7.yasm |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/olap_workload/olap_workload |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/_ef8d28aaeb50572325dd14d9b4.yasm |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |56.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/_b74ebee90bb7903d84da5b42f7.yasm |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/_2549b9c50b780e2386d838ff17.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/olap_workload/_ff50d6fda3ad3eb37aec4bae1a.yasm |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/olap_workload/libpy3olap_workload.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/olap_workload/libpy3olap_workload.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/csv/parser.cc |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileBase.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTOptimizeQuery.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserDropQuery.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/reader.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Functions/IFunction.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/libpy3statistics_workload.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PipeFDs.cpp |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/_55f03b5c7ff48bc16cc7bbe438.yasm |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/statistics_workload/libpy3statistics_workload.global.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TaskStatsInfoGetter.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/WriteBufferFromFileDescriptor.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/DataTypeDecimalBase.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_wide_top_sort.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/TabSeparatedRowOutputFormat.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/api_vector.cc |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/statistics_workload/statistics_workload |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/external_sources/object_storage_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp >> Json::BasicRendering [GOOD] |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/functional/kqp/kqp_indexes/main.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_2549b9c50b780e2386d838ff17.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_b74ebee90bb7903d84da5b42f7.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/_68875c7d34f9bbe09248b5ec55.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |56.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/json/ut/unittest >> Json::BasicRendering [GOOD] |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNullable.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_block_func.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_join_impl.cpp |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationIP.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTIdentifier.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_5f751080373d4214d525810354.yasm |56.2%| 
[AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_4a7a64454c9245b8cfbbd6c568.yasm |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_6d1763e0cdc6e301e2989d8343.yasm |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_efe80a67dc5fbde40e7e446fba.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/_b74ebee90bb7903d84da5b42f7.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Core/SettingsEnums.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTDictionaryAttributeDeclaration.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/IPv6ToBinary.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/TimerDescriptor.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/Serializations/SerializationNothing.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/DataTypes/getLeastSupertype.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/json/options.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/aggregate_quantile.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_scalar_apply.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_cast_numeric.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/formatAST.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |56.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... 
grpc.pb.h} |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_op_settings.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/util/cancel.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/PODArray.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/LZ4_decompress_faster.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/ProcfsMetricsProvider.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/provider/yql_yt_block_input.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/_50e0f2b3b90efa9a1305502b02.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/IO/parseDateTimeBestEffort.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/IParserBase.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... 
grpc.pb.h} |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_b5385522105a31f0a0c490bbb8.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/mrrun/mrrun.cpp |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/join_d158d6388395f7fac32a213c83.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_b74ebee90bb7903d84da5b42f7.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_1f45adb640e82c46627e2b2d3a.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_ee50fbcae5d7c3ae1b7c168722.yasm |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/_b74ebee90bb7903d84da5b42f7.yasm |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_9cde4489f7fa94a76b9b02d638.yasm |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/_ed475535b561d333796c95a705.yasm |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ASTNameTypePair.cpp |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compare.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Processors/Formats/Impl/JSONAsStringRowInputFormat.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/memory_pool.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm14/mkql_source.cpp >> RuntimeFeatureFlags::UpdatingRuntimeFlags [GOOD] >> RuntimeFeatureFlags::DefaultValues [GOOD] >> RuntimeFeatureFlags::ConversionFromProto [GOOD] >> RuntimeFeatureFlags::ConversionToProto [GOOD] |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/_d328b88e4d44d441b3413acc15.yasm |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/parquet/arrow/schema.cc |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/Allocator.cpp |56.2%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/_d7c26740fdaf6e78003c7f21df.yasm |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |56.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.global.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/ParserPartition.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/zookeeper/requests.cpp |56.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Compression/CompressionCodecLZ4.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Parsers/parseIntervalKind.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/udfs/common/clickhouse/client/src/Common/FieldVisitorDump.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |56.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/base/generated/ut/unittest >> RuntimeFeatureFlags::ConversionToProto [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/contrib/libs/apache/arrow/cpp/src/arrow/compute/kernels/scalar_temporal.cc |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/driver/nemesis |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |64.9%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |65.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |65.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |65.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |66.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |66.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |67.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |68.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |68.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |68.9%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/_dc9abab7075b555a3ef54c0d31.yasm |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |68.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |68.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |68.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |68.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |68.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |69.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |69.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |69.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |69.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |69.4%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |69.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |70.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |70.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |70.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |70.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |70.3%| [AR] {default-linux-x86_64, release, 
asan} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |70.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |70.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |70.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |71.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |71.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |72.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |72.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |72.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |72.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |72.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |72.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |73.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |73.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |73.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |73.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |74.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |74.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |74.6%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |74.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |75.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |75.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |75.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |75.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |75.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |75.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |75.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |75.4%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_8e7a34ec2df8fda1ade7839923.yasm |75.5%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_b74ebee90bb7903d84da5b42f7.yasm |75.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |75.5%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |75.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |75.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp 
|75.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |75.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |75.6%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/_e68548efa11d3a00711f021bed.yasm |75.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |76.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |76.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |76.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |76.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |76.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/benchmark |76.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/mrrun/mrrun |77.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |77.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |77.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |77.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |77.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |77.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |77.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |77.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |77.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |77.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |77.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |77.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |77.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |77.7%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |77.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |77.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |78.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |78.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |78.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |78.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |79.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |79.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |79.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |79.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |80.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/libydb-core-protos.a |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |80.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/protos/libydb-core-protos.a |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |80.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |80.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_4a4d1a0f629769ad18cbbbf6ac.yasm |80.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_2ef1517045ab9cce02fdf81d44.yasm |80.8%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/_b74ebee90bb7903d84da5b42f7.yasm |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/build/sanitize-blacklist.txt |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |80.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |80.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |80.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |80.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |80.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |80.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |80.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |80.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |80.4%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |80.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |80.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |80.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |80.4%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |80.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |80.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |80.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |80.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |80.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |79.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |80.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |79.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |79.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |79.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |79.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |79.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |79.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |79.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |79.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |79.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |79.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |79.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/common_ut.cpp |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |79.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |79.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compression_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/compress_executor_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/retry_policy_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |79.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |79.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |79.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |79.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |79.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/read_session_ut.cpp |79.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |79.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/ydb-library-yaml_config-static_validator-ut |79.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |79.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |79.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut |79.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |79.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |79.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |79.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |79.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |79.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |79.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |79.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |79.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |79.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |79.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |78.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |78.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |79.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |79.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> StaticValidator::HostConfigs >> StaticValidator::HostConfigs [GOOD] >> StaticValidator::Hosts [GOOD] >> StaticValidator::DomainsConfig [GOOD] |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |78.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |78.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |78.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] |77.4%| [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/static_validator/ut/unittest >> StaticValidator::DomainsConfig [GOOD] >> LongTxServicePublicTypes::LongTxId [GOOD] >> LongTxServicePublicTypes::SnapshotMaxTxId [GOOD] >> LongTxServicePublicTypes::Snapshot [GOOD] >> Init::TWithDefaultParser |77.1%| RESOURCE $(sbr:4966407557) - 0 bytes |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/client/bin/sqs |77.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/client/bin/sqs >> Init::TWithDefaultParser [GOOD] |76.9%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |76.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/public/ut/unittest >> LongTxServicePublicTypes::Snapshot [GOOD] |76.5%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |76.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/init/ut/unittest >> Init::TWithDefaultParser [GOOD] |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |75.2%| [AR] {RESULT} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |75.1%| [AR] {RESULT} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |75.1%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |75.1%| [AR] {RESULT} $(B)/ydb/core/protos/libydb-core-protos.a |75.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |75.2%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |75.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |75.0%| [AR] {RESULT} $(B)/ydb/library/yql/providers/yt/provider/libproviders-yt-provider.a |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |75.1%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |74.8%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |74.8%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |74.8%| [LD] {RESULT} $(B)/ydb/tests/functional/dynumber/ydb-tests-functional-dynumber |74.7%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |74.7%| [LD] {RESULT} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |74.7%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm14/libminikql-comp_nodes-llvm14.a |74.7%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit >> TBlobStorageCompStrat::Test1 |74.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/example_configs/static_validator-ut-example_configs |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |74.7%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |74.7%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.7%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.8%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |74.8%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |74.8%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |74.8%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |74.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |74.8%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution >> TBlobStorageCompStrat::Test1 [GOOD] |74.6%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical >> TBlobStorageReplRecoveryMachine::BasicFunctionality >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> TTimeGridTest::TimeGrid [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk >> HullReplWriteSst::Basic |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |73.9%| [LD] {RESULT} $(B)/ydb/core/ymq/client/bin/sqs |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> StaticConfigExamples::MIRROR_3_DC_NODES_IN_MEMORY [GOOD] >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |73.8%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |73.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/metering/ut/unittest >> TTimeGridTest::TimeGrid [GOOD] |73.8%| [LD] {RESULT} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut >> StaticConfigExamples::SINGLE_NODE_IN_MEMORY [GOOD] >> StaticConfigExamples::BLOCK42 [GOOD] >> StaticConfigExamples::MIRROR_3_DC_9_NODES [GOOD] >> StaticConfigExamples::MIRROR_3_DC_NODES [GOOD] >> StaticConfigExamples::SingleNodeWithFile [GOOD] |73.2%| [TS] {RESULT} ydb/core/fq/libs/http_api_client/flake8 |73.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> TYardTest::TestInit >> TPDiskTest::TestAbstractPDiskInterface [GOOD] >> TYardTest::TestLogWriteRead >> TYardTest::TestChunkReadRandomOffset >> TYardTest::TestWholeLogRead >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector >> TPDiskTest::TestChunkWriteRelease >> TBlobStoragePDiskCrypto::TestMixedStreamCypher >> TPDiskTest::TestThatEveryValueOfEStateEnumKeepsItIntegerValue [GOOD] >> 
TLogCache::Simple [GOOD] >> TPDiskTest::TestPDiskActorErrorState >> TLogCache::EraseRangeSingleMinElement [GOOD] >> TBlobStoragePDiskCrypto::TestMixedStreamCypher [GOOD] >> TLogCache::EraseRangeSingleMidElement [GOOD] |72.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/static_validator/ut/example_configs/unittest >> StaticConfigExamples::SingleNodeWithFile [GOOD] >> TLogCache::EraseRangeSingleMaxElement [GOOD] |72.8%| COMPACTING CACHE 34.6GiB >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunk |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] >> TPDiskUtil::PayloadParsingTest [GOOD] >> TPDiskUtil::SectorRestorator [GOOD] >> TPDiskUtil::SectorRestoratorOldNewHash [GOOD] >> TBlockDeviceTest::TestDeviceWithSubmitGetThread >> TPDiskUtil::SectorPrint [GOOD] >> TPDiskUtil::TChunkIdFormatter [GOOD] >> TPDiskUtil::TOwnerPrintTest [GOOD] >> TPDiskUtil::TChunkStateEnumPrintTest [GOOD] >> TPDiskUtil::TIoResultEnumPrintTest [GOOD] >> TPDiskUtil::TIoTypeEnumPrintTest [GOOD] >> TPDiskUtil::TestNVMeSerial [GOOD] >> TPDiskUtil::TestDeviceList [GOOD] >> TPDiskUtil::TestBufferPool |72.8%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 >> TYardTest::TestLogWriteRead [GOOD] >> TYardTest::TestLogWriteReadMedium >> TPDiskTest::TestPDiskActorErrorState [GOOD] >> TYardTest::TestWholeLogRead [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopStart >> TBsDbStat::ChaoticParallelWrite_DbStat >> TYardTest::TestSysLogReordering >> TBlockDeviceTest::TestDeviceWithSubmitGetThread [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh >> TBlockDeviceTest::TestWriteSectorMapAllTypes >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh >> TBsVDiskRepl3::SyncLogTest |72.9%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_checks/unittest >> TBsVDiskRepl1::ReplProxyKeepBits >> TBsLocalRecovery::StartStopNotEmptyDB >> TBsVDiskExtreme::SimpleGetFromEmptyDB |72.9%| [TS] {RESULT} ydb/library/benchmarks/runner/flake8 >> TBsVDiskManyPutGet::ManyPutGet >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh >> TBsVDiskExtremeHuge::Simple3Put3GetFresh >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest >> Config::IncludeScope >> FormatTimes::ParseDuration [GOOD] >> TBsVDiskExtreme::Simple3Put3GetFresh |72.9%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 >> FormatTimes::DurationUs [GOOD] >> Config::IncludeScope [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction >> TBsLocalRecovery::WriteRestartReadHuge >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize >> TBsVDiskGC::GCPutKeepIntoEmptyDB |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction >> TBsVDiskBadBlobId::PutBlobWithBadId >> FormatTimes::DurationMs [GOOD] >> StatsFormat::FullStat [GOOD] >> TPDiskUtil::TestBufferPool [GOOD] >> TPDiskUtil::SectorMap |72.9%| [TS] {RESULT} ydb/core/base/generated/ut/unittest |72.9%| [TS] {RESULT} ydb/core/tx/long_tx_service/public/ut/unittest |72.9%| [TS] {RESULT} ydb/tests/library/ut/flake8 >> Config::ExcludeScope [GOOD] |72.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/fq/libs/compute/common/ut/unittest |72.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/py2_flake8 |72.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut/ydb-core-erasure-ut >> TPDiskTest::TestPDiskActorPDiskStopStart [GOOD] >> TPDiskTest::TestPDiskOwnerRecreation >> TYardTest::TestLogWriteReadMedium [GOOD] >> TYardTest::TestInit [GOOD] >> TYardTest::TestLogWriteReadMediumWithHddSectorMap >> TYardTest::TestInitOnIncompleteFormat |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationUs [GOOD] |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::IncludeScope [GOOD] |72.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/py2_flake8 |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] >> TPDiskUtil::SectorMap [GOOD] >> TPDiskUtil::SectorMapStoreLoadFromFile [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] |72.9%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::FullStat [GOOD] |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationMs [GOOD] |72.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/py2_flake8 |72.9%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/example_configs/unittest |72.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |72.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/py2_flake8 |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] |72.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |72.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |73.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |73.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |73.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/py2_flake8 |73.0%| [TS] {RESULT} ydb/tests/functional/dynumber/flake8 |73.0%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/flake8 |73.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |73.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |73.0%| [TS] {RESULT} ydb/core/viewer/json/ut/unittest |73.0%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 |73.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |73.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |73.0%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |73.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/py2_flake8 |73.0%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |73.0%| [TS] {RESULT} ydb/core/debug_tools/ut/unittest |73.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/ut/ydb-core-persqueue-codecs-ut |73.0%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |73.0%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |73.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/py2_flake8 |73.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/py2_flake8 |73.0%| [TS] {RESULT} ydb/tests/stability/ydb/flake8 |73.0%| [TS] {RESULT} 
ydb/tests/fq/yt/kqp_yt_file/part10/py2_flake8 |73.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |73.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |73.0%| [TS] {RESULT} ydb/tests/functional/config/flake8 |73.1%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |73.1%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 |73.1%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |73.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |73.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/py2_flake8 |73.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/ut/ydb-core-config-tools-protobuf_plugin-ut |73.1%| [TS] {RESULT} ydb/core/resource_pools/ut/unittest >> TErasureTypeTest::TestStripe33LossOfAllPossible3 >> TYardTest::TestLogWriteReadMediumWithHddSectorMap [GOOD] >> TYardTest::TestLogWriteReadLarge |73.1%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |73.1%| [TS] {RESULT} ydb/core/fq/libs/hmac/ut/unittest |73.1%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 >> TErasureTypeTest::TestBlock33LossOfAllPossible3 |73.1%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |73.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/py2_flake8 |73.1%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/flake8 |73.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> TErasureTypeTest::TestStripe22LossOfAllPossible2 >> TErasureTypeTest::TestBlock42PartialRestore2 |73.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/py2_flake8 |73.1%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 |73.1%| [TS] {RESULT} ydb/tests/functional/api/flake8 |73.1%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 >> TErasureTypeTest::TestBlockByteOrder |73.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/py2_flake8 |73.1%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/flake8 >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 >> TBsVDiskRepl3::SyncLogTest [GOOD] >> TErasureTypeTest::TestBlockByteOrder [GOOD] >> TErasureTypeTest::TestBlock42LossOfAllPossible2 >> THugeMigration::ExtendMap_HugeBlobs |73.1%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |73.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/py2_flake8 |73.1%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 >> ErasureBrandNew::Block42_encode >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction |73.1%| [TM] {RESULT} ydb/core/tablet_flat/ut_large/unittest |73.1%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/ut/ydb-core-fq-libs-metrics-ut >> TErasureTypeTest::TestAllSpecies1of2 >> TErasureTypeTest::TestEo [GOOD] >> TErasureTypeTest::TestBlock23LossOfAllPossible3 |73.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |73.2%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |73.2%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 >> TErasureTypeTest::TestBlock42PartialRestore0 >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TYardTest::TestLogWriteReadLarge [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction >> TYardTest::TestLogWriteCutEqual >> TBsVDiskExtreme::SimpleGetFromEmptyDB [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh >> TBsVDiskExtremeHuge::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction >> TErasureTypeTest::TestMirror3LossOfAllPossible3 >> TErasureTypeTest::TestBlock42PartialRestore3 |73.2%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |73.2%| [LD] {RESULT} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |73.2%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlockByteOrder [GOOD] |73.2%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 >> TYardTest::TestInitOnIncompleteFormat [GOOD] >> TYardTest::TestInitOwner >> ErasureBrandNew::Block42_restore |73.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |73.2%| [TS] {RESULT} ydb/core/config/validation/ut/unittest |73.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |73.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestEo [GOOD] >> TErasureTypeTest::TestBlock22LossOfAllPossible2 |73.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |73.2%| [TS] {RESULT} ydb/core/fq/libs/signer/ut/unittest |73.2%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |73.2%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |73.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |73.2%| [TS] {RESULT} ydb/core/config/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore1 >> TErasureTypeTest::TestStripe32LossOfAllPossible2 >> TErasureTypeTest::TestBlock43LossOfAllPossible3 |73.2%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |73.2%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |73.2%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |73.3%| [TM] {RESULT} ydb/core/tablet_flat/ut_util/unittest >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] >> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh >> TBsVDiskExtreme::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtreme::Simple3Put3GetCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction [GOOD] |73.3%| [TS] {RESULT} ydb/public/tools/ydb_recipe/flake8 |73.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |73.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |73.3%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |73.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |73.3%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/scheme/ut/ydb-core-scheme-ut >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh >> TBsVDiskGC::GCPutKeepIntoEmptyDB [GOOD] >> TBsVDiskBadBlobId::PutBlobWithBadId [GOOD] >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath >> TBsVDiskGC::GCPutBarrierVDisk0NoSync >> TBsVDiskRepl1::ReplProxyKeepBits [GOOD] >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction |73.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |73.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |73.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |73.3%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |73.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |73.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |73.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |73.3%| [TS] {RESULT} ydb/tools/cfg/bin/flake8 |73.3%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/unittest |73.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/py2_flake8 |73.3%| [TS] {RESULT} ydb/core/tx/scheme_board/ut_double_indexed/unittest >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] |73.3%| [TS] {RESULT} ydb/core/tx/sequenceshard/public/ut/unittest |73.3%| [TS] {RESULT} ydb/core/erasure/ut_perf/unittest |73.3%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |73.3%| [TS] {RESULT} ydb/core/kqp/ut/federated_query/common/clang_format |73.3%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator/unittest |73.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] |73.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |73.4%| [TS] {RESULT} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest |73.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/py2_flake8 |73.4%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 >> TBsVDiskManyPutGet::ManyPutGet [GOOD] >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction |73.4%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/flake8 >> TYardTest::TestInitOwner [GOOD] >> TYardTest::TestIncorrectRequests >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] >> TBsVDiskGC::TGCManyVPutsDelTabletTest |73.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |73.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |73.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |73.4%| [TS] {RESULT} ydb/core/blobstorage/base/ut/gtest |73.4%| [TS] {RESULT} ydb/mvp/meta/ut/unittest |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] >> TBlockDeviceTest::TestWriteSectorMapAllTypes [GOOD] >> TBlockDeviceTest::WriteReadRestart |73.4%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |73.4%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 >> 
TErasureTypeTest::TestStripe42LossOfAllPossible2 >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD] |73.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |73.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |73.4%| [TS] {RESULT} ydb/tools/tstool/flake8 |73.4%| [TS] {RESULT} ydb/apps/dstool/flake8 |73.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut >> TErasureTypeTest::TestStripe23LossOfAllPossible3 |73.4%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |73.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |73.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/py2_flake8 |73.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |73.4%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |73.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/py2_flake8 |73.4%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |73.5%| [TS] {RESULT} ydb/core/metering/ut/unittest >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] |73.5%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD] |73.5%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh |73.5%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |73.5%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |73.5%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 >> TYardTest::TestIncorrectRequests [GOOD] >> TYardTest::TestEmptyLogRead >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector |73.5%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy >> SamplingControlTests::EdgeCaseLower >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction >> SamplingControlTests::EdgeCaseLower [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |73.5%| [TS] {RESULT} ydb/tests/functional/rename/flake8 >> TYardTest::TestEmptyLogRead [GOOD] >> TYardTest::TestLogContinuityPersistence >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh |73.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut >> TPDiskTest::TestPDiskOwnerRecreation [GOOD] >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction [GOOD] |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD] >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath [GOOD] |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut >> TBsVDiskDefrag::DefragEmptyDB |73.5%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseLower [GOOD] |73.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh |73.5%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |73.5%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_builder/unittest |73.5%| [TS] {RESULT} ydb/tests/functional/cms/flake8 >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |73.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] |73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> TBsVDiskGC::GCPutBarrierVDisk0NoSync [GOOD] |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 >> TBsVDiskGC::GCPutBarrierSync |73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |73.6%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD] |73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |73.6%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh >> TBsVDiskExtreme::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly |73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |73.6%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |73.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> TYardTest::TestLogContinuityPersistence [GOOD] >> TYardTest::TestLogContinuityPersistenceLarge |73.6%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |73.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 >> THugeMigration::ExtendMap_HugeBlobs [GOOD] >> THugeMigration::ExtendMap_SmallBlobsBecameHuge >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction |73.6%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/flake8 |73.6%| [TS] {RESULT} ydb/core/blobstorage/crypto/ut/unittest |73.6%| [LD] {RESULT} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |73.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector |73.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |73.6%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |73.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |73.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |73.6%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |73.6%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |73.6%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/flake8 |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] >> NameserviceConfigValidatorTests::TestEmptyConfig [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingId [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] |73.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/py2_flake8 >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |73.6%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/flake8 |73.7%| [TS] {RESULT} ydb/tools/statistics_workload/flake8 |73.7%| [TS] {RESULT} ydb/public/tools/local_ydb/flake8 |73.7%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |73.7%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/flake8 |73.7%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRepl1::ReplProxyData >> TPDiskTest::TestChunkWriteRelease [GOOD] >> TPDiskTest::TestLogWriteReadWithRestarts |73.7%| [LD] {RESULT} 
$(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |73.7%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |73.7%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 >> TBsVDiskDefrag::DefragEmptyDB [GOOD] >> TBsVDiskDefrag::Defrag50PercentGarbage >> Scheme::TSerializedCellVec [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction [GOOD] >> SysViewQueryHistory::AddDedupRandom >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction |73.7%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |73.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/py2_flake8 >> SysViewQueryHistory::AggrMerge [GOOD] >> SysViewQueryHistory::AddDedupRandom [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] |73.8%| [TS] {RESULT} ydb/core/pgproxy/ut/unittest |73.8%| [TS] {RESULT} ydb/tests/fq/common/flake8 |73.8%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |73.8%| [TS] {RESULT} ydb/core/config/init/ut/unittest >> ResourceBrokerConfigValidatorTests::TestRepeatedTaskName [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnknownQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnlimitedResource [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh >> SysViewQueryHistory::StableMerge [GOOD] |73.8%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |73.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |73.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskGC::TGCManyVPutsDelTabletTest [GOOD] >> TBsVDiskManyPutGet::ManyMultiSinglePutGet >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly >> TBlobStorageQueueTest::TMessageLost [GOOD] |73.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::TSerializedCellVec [GOOD] Test command err: Serialize: 0.000212s Cells constructor: 0.001894s Parse: 0.000175s Copy: 0.001113s Move: 0.000050s |73.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction [GOOD] >> 
TBsVDiskExtreme::Simple3Put1GetMissingPartFresh |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedupRandom [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMerge [GOOD] >> TBlockDeviceTest::WriteReadRestart [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge [GOOD] >> TColorLimitsTest::Colors |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageQueueTest::TMessageLost [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh [GOOD] >> TColorLimitsTest::Colors [GOOD] >> TColorLimitsTest::OwnerFreeSpaceShare [GOOD] >> TLogCache::EraseRangeOnEmpty [GOOD] >> TLogCache::EraseRangeOutsideOfData [GOOD] >> TLogCache::EraseRangeSample [GOOD] >> TLogCache::EraseRangeAllExact [GOOD] >> TLogCache::EraseRangeAllAmple [GOOD] >> PDiskCompatibilityInfo::OldCompatible >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction |73.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/ut/ydb-core-util-ut |73.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut >> TBsLocalRecovery::WriteRestartReadHuge [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased >> THugeMigration::ExtendMap_SmallBlobsBecameHuge [GOOD] >> THugeMigration::RollbackMap_HugeBlobs >> PDiskCompatibilityInfo::OldCompatible [GOOD] >> PDiskCompatibilityInfo::Incompatible >> TBsVDiskRepl1::ReplProxyData [GOOD] >> TBsVDiskRepl1::ReplEraseDiskRestore |73.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut >> TFreshAppendixTest::IterateForwardIncluding [GOOD] >> TFreshAppendixTest::IterateForwardExcluding [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh >> TBsVDiskGC::GCPutBarrierSync [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction |73.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner [GOOD] >> TPDiskTest::TestPDiskManyOwnersInitiation >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh >> PDiskCompatibilityInfo::Incompatible [GOOD] >> PDiskCompatibilityInfo::NewIncompatibleWithDefault |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateForwardExcluding [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction >> PgTest::DumpIntCells >> TopicNameConverterTest::LegacyStyle [GOOD] 
>> TopicNameConverterTest::FirstClass [GOOD] >> DiscoveryConverterTest::DiscoveryConverter [GOOD] >> DiscoveryConverterTest::EmptyModern [GOOD] |73.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session >> TopicNameConverterForCPTest::CorrectLegacyTopics [GOOD] >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] >> TBlobStorageHullFresh::SimpleBackWardEnd2Times [GOOD] >> TBlobStorageHullFresh::Perf >> ErasureBrandNew::Block42_encode [GOOD] >> ErasureBrandNew::Block42_chunked >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector >> PDiskCompatibilityInfo::NewIncompatibleWithDefault [GOOD] >> PDiskCompatibilityInfo::Trunk >> PgTest::DumpIntCells [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> TBsVDiskManyPutGet::ManyMultiSinglePutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet >> TYardTest::TestChunkReadRandomOffset [GOOD] >> TYardTest::TestChunkWriteRead >> TBlobStorageHullFresh::AppendixPerf >> PDiskCompatibilityInfo::Trunk [GOOD] >> PDiskCompatibilityInfo::SuppressCompatibilityCheck |73.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly >> TYardTest::TestLogContinuityPersistenceLarge [GOOD] >> TYardTest::TestHttpInfo |73.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::EmptyModern [GOOD] |73.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction >> PDiskCompatibilityInfo::SuppressCompatibilityCheck [GOOD] >> PDiskCompatibilityInfo::Migration >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector |73.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::FirstClass [GOOD] |73.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |73.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |73.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpIntCells [GOOD] |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD] >> TYardTest::TestHttpInfo [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> PDiskCompatibilityInfo::Migration [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist [GOOD] >> TYardTest::TestFirstRecordToKeep >> 
TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] >> TPDiskTest::TestPDiskManyOwnersInitiation [GOOD] >> TPDiskTest::TestVDiskMock >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector >> THugeMigration::RollbackMap_HugeBlobs [GOOD] >> TYardTest::TestChunkWriteRead [GOOD] >> TMonitoring::ReregisterTest >> TBlobStorageSyncNeighborsTest::IterateOverAllDisks [GOOD] >> TYardTest::TestChunkWriteReadWithHddSectorMap >> TBlobStorageSyncNeighborsTest::SerDes [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskIterators [GOOD] >> TCircleBufStringStreamTest::TestAligned [GOOD] >> TPDiskTest::TestLogWriteReadWithRestarts [GOOD] >> TPDiskTest::TestLogSpliceNonceJump |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TMonitoring::ReregisterTest [GOOD] >> TBlobStorageHullFresh::Perf [GOOD] >> TYardTest::TestFirstRecordToKeep [GOOD] >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TPDiskTest::TestVDiskMock [GOOD] >> TPDiskTest::TestRealFile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> PDiskCompatibilityInfo::Migration [GOOD] Test command err: GREEN 0.5025125628 0 CYAN 0.8623115578 0.862 LIGHT_YELLOW 0.8934673367 0.893 YELLOW 0.9145728643 0.914 LIGHT_ORANGE 0.9306532663 0.93 PRE_ORANGE 0.9467336683 0.946 ORANGE 0.9668341709 0.966 RED 0.9879396985 0.987 BLACK 0.9979899497 0.997 >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD] |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufStringStreamTest::TestAligned [GOOD] >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut/ydb-core-base-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TMonitoring::ReregisterTest [GOOD] Test command err: RUN TEST SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut/ydb-core-base-ut |74.1%| [LD] {RESULT} $(B)/ydb/core/base/ut/ydb-core-base-ut |74.1%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::Perf [GOOD] >> TopicNameConverterTest::LegacyStyleDoubleName [GOOD] >> TopicNameConverterTest::NoTopicName [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorForward [GOOD] >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector [GOOD] >> TYardTest::TestBadDeviceInit >> TYardTest::TestChunkWriteReadWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadMultiple >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] >> TPDiskTest::TestLogSpliceNonceJump [GOOD] >> TPDiskTest::TestMultipleLogSpliceNonceJump |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |74.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::NoTopicName [GOOD] |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] |74.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] >> TYardTest::TestBadDeviceInit [GOOD] >> TYardTest::TestChunkContinuity2 >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |74.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |74.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |74.2%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 >> TBlobStorageGroupInfoIterTest::Domains [GOOD] >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |74.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |74.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut >> TBlobStorageGroupInfoBlobMapTest::CheckCorrectBehaviourWithHashOverlow [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorForwardAndBackward >> TBlobStorageIngressMatrix::VectorTestEmpty [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary >> TYardTest::TestChunkContinuity2 [GOOD] >> TYardTest::TestChunkContinuity3000 >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 >> TBlobStorageGroupInfoIterTest::IteratorForwardAndBackward [GOOD] >> ErasureBrandNew::Block42_chunked [GOOD] >> TBlobStorageIngress::Ingress [GOOD] >> TBlobStorageIngress::IngressCacheMirror3 [GOOD] >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder [GOOD] >> TYardTest::TestDestroySystem >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseAnd [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement1 [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk [GOOD] >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] |74.3%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider >> TYardTest::TestChunkContinuity3000 [GOOD] >> TYardTest::TestChunkContinuity9000 |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] >> TBlobStorageIngress::IngressCreateFromRepl [GOOD] >> TBlobStorageIngress::IngressGetMainReplica [GOOD] >> TBlobStorageIngress::IngressHandoffPartsDelete [GOOD] >> TPDiskTest::TestMultipleLogSpliceNonceJump [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyLogWrite >> TBlobStorageIngress::IngressPartsWeMustHaveLocally [GOOD] >> TBlobStorageIngress::IngressLocalParts [GOOD] >> TBlobStorageIngress::IngressPrintDistribution [GOOD] >> TLogoBlobTest::LogoBlobSort [GOOD] >> TMemoryStatsAggregator::Aggregate_Empty [GOOD] >> TMemoryStatsAggregator::Aggregate_Single [GOOD] >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] >> TableIndex::CompatibleSecondaryIndex [GOOD] >> TableIndex::NotCompatibleSecondaryIndex [GOOD] >> TableIndex::CompatibleVectorIndex [GOOD] >> TableIndex::NotCompatibleVectorIndex [GOOD] >> TBlobStorageGroupTypeTest::TestCorrectLayout [GOOD] >> TGuardianImpl::FollowerTracker [GOOD] >> TGuardianImpl::FollowerTrackerDuplicates [GOOD] >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] >> TBlobStorageIngressMatrix::VectorTestMinus [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] >> TBlobStorageIngressMatrix::VectorTest |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] >> TYardTest::TestChunkContinuity9000 [GOOD] >> TBlobStorageIngressMatrix::VectorTest [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore1 [GOOD] >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] >> TYardTest::TestChunkLock >> TStateStorageConfig::TestReplicaSelection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_chunked [GOOD] Test command err: totalSize# 497537061 period1# 2.991610s period2# 0.895941s MB/s1# 166.3108029 MB/s2# 555.3234655 factor# 3.339070318 |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TableIndex::NotCompatibleVectorIndex [GOOD] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressHandoffPartsDelete [GOOD] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] >> TYardTest::TestDestroySystem [GOOD] >> TYardTest::TestDestructionWhileWritingChunk >> TPDiskRaces::KillOwnerWhileDeletingChunk [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight >> TYardTest::TestChunkLock [GOOD] >> TYardTest::TestCheckSpace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 
ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 65 MemAvailable: 85 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 145 SoftLimit: 165 TargetUtilization: 185 ExternalConsumption: 194 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressPrintDistribution [GOOD] >> Path::Name_EnglishAlphabet [GOOD] >> Path::Name_RussianAlphabet [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C [GOOD] >> Path::Name_ExtraSymbols [GOOD] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] >> TBlobStorageGroupTypeTest::OutputInfoAboutErasureSpecies [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C_UTF8 [GOOD] >> Path::Name_WeirdLocale_RegularName [GOOD] >> Path::Name_WeirdLocale_WeirdName [GOOD] |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |74.4%| [LD] {RESULT} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut 
|74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut >> TYardTest::TestCheckSpace [GOOD] >> TYardTest::TestBootingState >> TPDiskTest::TestFakeErrorPDiskManyLogWrite [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] >> TYardTest::TestDestructionWhileWritingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingChunk |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_ExtraSymbols [GOOD] |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |74.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_WeirdLocale_WeirdName [GOOD] >> Path::CanonizeOld [GOOD] >> Path::CanonizeFast [GOOD] >> Path::CanonizedStringIsSame1 [GOOD] >> Path::CanonizedStringIsSame2 [GOOD] >> Path::Name_AllSymbols [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased >> TPDiskTest::TestFakeErrorPDiskLogRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskSysLogRead >> TLogoBlobIdHashTest::SimpleTestWithDifferentTabletId [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentSteps [GOOD] >> TLogoBlobTest::LogoBlobParse [GOOD] >> TLogoBlobTest::LogoBlobCompare [GOOD] >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_OneHost [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost [GOOD] >> THullDsGenericNWayIt::ForwardIteration [GOOD] >> THullDsGenericNWayIt::BackwardIteration [GOOD] >> TYardTest::TestDestructionWhileReadingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingLog >> TBlobStorageLinearTrackBar::TestLinearTrackBarDouble [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] >> TPDiskTest::TestFakeErrorPDiskSysLogRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkRead |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TLogoBlobIdHashTest::SimpleTest [GOOD] >> TLogoBlobIdHashTest::SimpleTestPartIdDoesNotMatter [GOOD] >> TLogoBlobIdHashTest::SimpleTestBlobSizeDoesNotMatter [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |74.4%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_AllSymbols [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost 
[GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 ExternalConsumption: 306 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 
163 AnonRss: 36 CGroupLimit: 66 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 80 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TLogoBlobTest::LogoBlobCompare [GOOD] >> TYardTest::TestDestructionWhileReadingLog [GOOD] >> TYardTest::TestFormatInfo >> TBlobStorageBlocksCacheTest::Repeat [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsGenericNWayIt::BackwardIteration [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |74.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] >> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD] >> TBlobStorageHullHugeChain::HeapAllocSmall [GOOD] >> TBlobStorageHullHugeDefs::FreeRes1 [GOOD] >> TBsVDiskDefrag::Defrag50PercentGarbage [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] Test command err: [0:1:0:3:1]# 173 184 157 167 152 185 195 192 144 [0:1:1:1:1]# 189 195 192 171 157 161 167 155 196 [0:1:3:3:1]# 184 157 182 152 185 157 192 144 189 [0:1:3:4:0]# 148 154 155 158 194 160 156 163 140 [0:1:2:3:2]# 152 177 174 176 154 146 161 170 168 [0:1:1:2:1]# 157 167 152 189 195 192 171 157 161 [0:1:1:0:2]# 158 150 
131 167 177 161 177 174 173 [0:1:3:0:1]# 161 155 171 196 154 167 184 157 182 [0:1:0:3:2]# 174 173 152 146 184 176 168 157 161 [0:1:2:2:0]# 163 140 161 148 162 159 168 178 190 [0:1:0:2:0]# 161 156 163 159 196 148 190 162 168 [0:1:3:2:1]# 152 185 157 192 144 189 161 155 171 [0:1:2:3:1]# 157 182 173 185 157 167 144 189 195 [0:1:3:1:2]# 157 161 170 131 190 158 161 178 167 [0:1:2:0:1]# 155 171 157 154 167 155 157 182 173 [0:1:3:0:2]# 131 190 158 161 178 167 173 152 177 [0:1:2:0:2]# 190 158 150 178 167 177 152 177 174 [0:1:2:4:1]# 154 167 155 157 182 173 185 157 167 [0:1:2:1:2]# 161 170 168 190 158 150 178 167 177 [0:1:2:4:2]# 178 167 177 152 177 174 176 154 146 [0:1:0:2:1]# 167 152 185 195 192 144 157 161 155 [0:1:0:0:0]# 190 162 168 174 148 154 177 158 194 [0:1:3:2:0]# 156 163 140 196 148 162 162 168 178 [0:1:1:0:1]# 171 157 161 167 155 196 182 173 184 [0:1:0:2:2]# 146 184 176 168 157 161 150 131 190 [0:1:1:0:0]# 178 190 162 155 174 148 160 177 158 [0:1:2:3:0]# 194 160 177 163 140 161 148 162 159 [0:1:2:4:0]# 154 155 174 194 160 177 163 140 161 [0:1:1:3:2]# 177 174 173 154 146 184 170 168 157 [0:1:2:1:1]# 144 189 195 155 171 157 154 167 155 [0:1:1:1:0]# 162 159 196 178 190 162 155 174 148 [0:1:1:3:1]# 182 173 184 157 167 152 189 195 192 [0:1:3:4:1]# 196 154 167 184 157 182 152 185 157 [0:1:1:4:2]# 167 177 161 177 174 173 154 146 184 [0:1:0:1:0]# 159 196 148 190 162 168 174 148 154 [0:1:3:4:2]# 161 178 167 173 152 177 184 176 154 [0:1:0:0:1]# 157 161 155 155 196 154 173 184 157 [0:1:1:4:0]# 155 174 148 160 177 158 140 161 156 [0:1:2:1:0]# 148 162 159 168 178 190 154 155 174 [0:1:2:0:0]# 168 178 190 154 155 174 194 160 177 [0:1:3:3:2]# 173 152 177 184 176 154 157 161 170 [0:1:0:4:0]# 174 148 154 177 158 194 161 156 163 [0:1:1:2:0]# 140 161 156 162 159 196 178 190 162 [0:1:0:1:1]# 195 192 144 157 161 155 155 196 154 [0:1:3:0:0]# 162 168 178 148 154 155 158 194 160 [0:1:3:1:1]# 192 144 189 161 155 171 196 154 167 [0:1:0:4:1]# 155 196 154 173 184 157 167 152 185 [0:1:2:2:1]# 185 157 167 144 189 195 155 171 157 [0:1:3:1:0]# 196 148 162 162 168 178 148 154 155 [0:1:2:2:2]# 176 154 146 161 170 168 190 158 150 [0:1:0:3:0]# 177 158 194 161 156 163 159 196 148 [0:1:3:3:0]# 158 194 160 156 163 140 196 148 162 [0:1:0:1:2]# 168 157 161 150 131 190 177 161 178 [0:1:3:2:2]# 184 176 154 157 161 170 131 190 158 [0:1:1:3:0]# 160 177 158 140 161 156 162 159 196 [0:1:1:2:2]# 154 146 184 170 168 157 158 150 131 [0:1:1:4:1]# 167 155 196 182 173 184 157 167 152 [0:1:1:1:2]# 170 168 157 158 150 131 167 177 161 [0:1:0:0:2]# 150 131 190 177 161 178 174 173 152 [0:1:0:4:2]# 177 161 178 174 173 152 146 184 176 mean# 166.6666667 dev# 15.11254078 |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] >> TYardTest::TestFormatInfo [GOOD] >> TYardTest::TestEnormousDisk >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] >> TBlobStorageBlocksCacheTest::MultipleTables [GOOD] >> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD] >> TBlobStorageHullHugeHeap::RecoveryMode [GOOD] >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::Repeat [GOOD] >> TStateStorageConfig::TestReplicaSelection [GOOD] >> TStateStorageConfig::TestMultiReplicaFailDomains |74.9%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeDefs::FreeRes1 [GOOD] |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |74.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut >> TYardTest::TestBootingState [GOOD] >> TYardTest::Test3AsyncLog >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::MultipleTables [GOOD] >> DiscoveryConverterTest::CmWay [GOOD] >> DiscoveryConverterTest::AccountDatabase [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD] >> TBlobStorageHullHugeChain::AllocFreeAllocTest [GOOD] >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction >> TBlobStorageHullHugeChain::HeapAllocLargeStandard [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] >> TYardTest::Test3AsyncLog [GOOD] >> TYardTest::TestChunkDelete >> DiscoveryConverterTest::MinimalName [GOOD] >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] |74.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::AccountDatabase [GOOD] |74.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> TPDiskTest::TestFakeErrorPDiskManyChunkRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite >> TBsVDiskRepl1::ReplEraseDiskRestore [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] >> Scheme::TSerializedCellMatrix [GOOD] |74.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |74.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |74.9%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |74.9%| [TS] {asan, 
default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |74.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |74.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] >> TYardTest::TestChunkDelete [GOOD] >> TYardTest::TestChunkForget |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite [GOOD] >> TPDiskTest::PDiskRestart >> TChainLayoutBuilder::TestProdConf [GOOD] >> THugeHeapCtxTests::Basic [GOOD] |74.9%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |74.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |74.9%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/io_formats/arrow/ut/ydb-core-io_formats-arrow-ut |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl1::ReplEraseDiskRestore [GOOD] Test command err: 2024-11-21T23:02:27.430600Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:02:27.572276Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 9006315172505094597] 2024-11-21T23:02:27.778930Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TYardTest::TestChunkForget [GOOD] >> TYardTest::Test3HugeAsyncLog >> TopTest::Test1 [GOOD] |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |75.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::TSerializedCellMatrix [GOOD] >> TFlatDatabasePgTest::BasicTypes >> TFlatDatabasePgTest::BasicTypes [GOOD] |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |75.0%| [LD] {RESULT} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut >> TPDiskTest::PDiskRestart [GOOD] >> TPDiskTest::PDiskRestartManyLogWrites |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |75.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |75.0%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> THugeHeapCtxTests::Basic [GOOD] |75.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TopTest::Test2 [GOOD] >> TPDiskTest::PDiskRestartManyLogWrites [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |75.0%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] >> TChainLayoutBuilder::TestMilestoneId [GOOD] |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test1 [GOOD] |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |75.0%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> TFlatDatabasePgTest::BasicTypes [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] >> TBlobStorageHullHugeHeap::BorderValues [GOOD] |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |75.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart [GOOD] |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std >> TBsVDiskRepl3::AnubisTest [GOOD] >> TBsVDiskRepl3::ReplPerf |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test2 [GOOD] |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut >> Scheme::EmptyOwnedCellVec [GOOD] >> DiscoveryConverterTest::FirstClass [GOOD] >> DiscoveryConverterTest::FullLegacyNames [GOOD] |75.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestMilestoneId [GOOD] |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TPDiskTest::TestRealFile [GOOD] >> ReadBatcher::Range >> TPDiskTest::TestSIGSEGVInTUndelivered |75.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::BorderValues [GOOD] |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TDelayedResponsesTests::Test [GOOD] >> THullDsHeapItTest::HeapForwardIteratorAllEntities [GOOD] >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> TBlobStorageHullStorageRatio::Test [GOOD] >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] >> ReadBatcher::ReadBatcher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] Test command err: 2024-11-21T23:02:08.749237Z :BS_VDISK_PUT ERROR: VDISK[0:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:1:10:0:0:10:1] Marker# BSVS43 2024-11-21T23:02:13.792381Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T23:02:13.792429Z :BS_SKELETON ERROR: VDISK[0:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2024-11-21T23:02:13.792456Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T23:02:13.792458Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:1:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T23:02:13.792495Z :BS_SKELETON ERROR: VDISK[0:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2024-11-21T23:02:13.792583Z :BS_SKELETON ERROR: VDISK[0:_:0:1:1]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2024-11-21T23:02:13.793022Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:3:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. 
Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T23:02:13.794419Z :BS_SKELETON ERROR: VDISK[0:_:0:3:1]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2024-11-21T23:02:13.796135Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T23:02:13.796815Z :BS_SKELETON ERROR: VDISK[0:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2024-11-21T23:02:13.797215Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:0:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2024-11-21T23:02:13.823019Z :BS_SKELETON ERROR: VDISK[0:_:0:0:1]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2024-11-21T23:02:13.913296Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:2:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::EmptyOwnedCellVec [GOOD] >> TPDiskTest::TestSIGSEGVInTUndelivered [GOOD] >> TPDiskTest::TestPDiskOnDifferentKeys |75.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FullLegacyNames [GOOD] |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.1%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption >> TBlobStorageHullCompactDeferredQueueTest::Basic |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |75.2%| [TA] $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.2%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] >> TPDiskTest::TestPDiskOnDifferentKeys [GOOD] >> TPDiskTest::WrongPDiskKey |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TDelayedResponsesTests::Test [GOOD] >> TopicNameConverterForCPTest::BadLegacyTopics [GOOD] >> TopicNameConverterForCPTest::BadModernTopics [GOOD] >> TBlobStorageHullDecimal::TestMkRatio [GOOD] >> TBlobStorageHullDecimal::TestMult [GOOD] >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark |75.2%| [TA] $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TYardTest::Test3HugeAsyncLog [GOOD] >> TYardTest::TestChunkFlushReboot >> TPDiskTest::WrongPDiskKey [GOOD] >> TPDiskUtil::AtomicBlockCounterFunctional [GOOD] >> TPDiskUtil::AtomicBlockCounterSeqno [GOOD] >> TPDiskUtil::Light >> TPDiskUtil::Light [GOOD] >> TPDiskUtil::LightOverflow [GOOD] >> TPDiskUtil::DriveEstimator |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMult [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve [GOOD] >> TPDiskTest::SpaceColor [GOOD] >> TPDiskTest::RecreateWithInvalidPDiskKey |75.2%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::BadModernTopics [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator1 [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |75.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.3%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TCowBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekForwardPermutationsThreadSafe >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark >> TCowBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe >> TStateStorageConfig::TestMultiReplicaFailDomains [GOOD] >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations >> TBlobStorageDiskBlob::Merge >> TBlobStorageDiskBlob::Merge [GOOD] >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::RandomInsertInplace >> TBlobStorageHullDecimal::TestRoundToInt [GOOD] >> TBlobStorageHullDecimal::TestToUi64 [GOOD] >> Scheme::CompareUuidCells [GOOD] >> TSTreeTest::Basic |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] >> TSTreeTest::Basic [GOOD] >> TSVecTest::Basic [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] >> TFreshAppendixTest::IterateBackwardAll >> TBlobStorageIngressMatrix::MatrixTest [GOOD] >> TBlobStorageIngressMatrix::ShiftedBitVecBase [GOOD] >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TFreshAppendixTest::IterateBackwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] >> TPDiskTest::RecreateWithInvalidPDiskKey [GOOD] >> TPDiskTest::SmallDisk10Gb >> TYardTest::TestChunkFlushReboot [GOOD] >> TYardTest::TestAllocateAllChunks |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |75.5%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] |75.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::CompareUuidCells [GOOD] >> TIntervalSetTest::IntervalVecTestEmpty [GOOD] >> TIntervalSetTest::IntervalVecTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAdd |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TSVecTest::Basic [GOOD] >> ReadBatcher::ReadBatcher [GOOD] >> TBlobStorageDiskBlob::CreateFromDistinctParts >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 >> TIntervalSetTest::IntervalVecTestAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestAddAgainstReference >> TBlobStorageDiskBlob::CreateFromDistinctParts [GOOD] >> TBlobStorageDiskBlob::CreateIterate [GOOD] |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestToUi64 [GOOD] |75.6%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] >> TIntervalSetTest::IntervalVecTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecUnion |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] >> TYardTest::TestChunkWriteReadMultiple [GOOD] >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap >> TPDiskTest::SmallDisk10Gb [GOOD] >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart >> TYardTest::TestAllocateAllChunks [GOOD] >> TYardTest::TestChunkDeletionWhileWriting >> TStrongTypeTest::DefaultConstructorDeleted [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityReadyQueue [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriorityWithRemove [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriority [GOOD] >> TPriorityQueueTest::TestOrder [GOOD] >> TSimpleCacheTest::TestSimpleCache [GOOD] >> TULID::ParseAndFormat [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityWaitingQueue [GOOD] >> TPriorityOperationQueueTest::UpdateNonExistingShouldReturnFalse [GOOD] >> TULID::Generate [GOOD] >> TWildcardTest::TestWildcards [GOOD] >> TTokenBucketTest::Unlimited [GOOD] >> TWildcardTest::TestWildcard [GOOD] >> TQueueInplaceTests::CleanInDestructor [GOOD] >> TPriorityOperationQueueTest::ShouldStartEmpty [GOOD] >> TQueueInplaceTests::TestSimpleInplace [GOOD] >> TTokenBucketTest::DelayCalculation [GOOD] >> TTokenBucketTest::Limited [GOOD] >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] >> TULID::HeadByteOrder [GOOD] >> TStrongTypeTest::DefaultConstructorValue [GOOD] >> TULID::TailByteOrder [GOOD] >> TSimpleCacheTest::TestNotSoSimpleCache [GOOD] >> TULID::EveryBitOrder [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased [GOOD] >> TBsOther1::PoisonPill >> TBlobStorageIngress::BarrierIngressQuorumBasicMirror3_4_2 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumBasic4Plus2_8_1 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumMirror3 [GOOD] >> TIntervalSetTest::IntervalSetTestEmpty [GOOD] >> TIntervalSetTest::IntervalSetTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAdd >> TIntrusiveStackTest::TestPushPop [GOOD] >> TIntrusiveStackTest::TestEmptyPop [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty >> TIntervalSetTest::IntervalVecUnion [GOOD] >> TIntervalSetTest::IntervalVecUnionInplace >> TIntervalSetTest::IntervalSetTestAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::ReadBatcher [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference >> TCircularQueueTest::ShouldRemoveCurrent [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestAddAgainstReference >> 
TConcurrentRWHashTest::TInsertIfAbsentTest [GOOD] >> TCircularQueueTest::ShouldNotRemoveMissing [GOOD] >> TCowBTreeTest::Empty [GOOD] >> TCowBTreeTest::MultipleSnapshots >> TCircularQueueTest::ShouldRemoveCurrentLast [GOOD] >> TCowBTreeTest::Basics [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTestFunc [GOOD] >> TConcurrentRWHashTest::TEraseTest [GOOD] >> TCircularQueueTest::ShouldPush [GOOD] >> TConcurrentRWHashTest::TRemoveTest [GOOD] >> TConcurrentRWHashTest::TInsertTest [GOOD] >> TCircularQueueTest::ShouldNotPushTwice [GOOD] >> TConcurrentRWHashTest::TEmptyGetTest [GOOD] >> TCircularQueueTest::ShouldRemove [GOOD] >> TCowBTreeTest::ClearAndReuse [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenNothingStarted [GOOD] >> TCircularOperationQueueTest::ShouldTryToStartAnotherOneWhenStartFails [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight10 [GOOD] >> TCircularOperationQueueTest::ShouldStartEmpty [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue100 [GOOD] >> TCircularQueueTest::Empty [GOOD] >> TCircularOperationQueueTest::UseMinOperationRepeatDelayWhenTimeout [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue10 [GOOD] >> TCircularOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue2 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight3 [GOOD] >> TCircularOperationQueueTest::RemoveNonExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight1 [GOOD] >> TCircularOperationQueueTest::ShouldShuffle [GOOD] >> TYardTest::TestChunkDeletionWhileWriting [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight2 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight100 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue1 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue3 [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenHasWaitingAndStart [GOOD] >> TCircularOperationQueueTest::ShouldReturnExecTime [GOOD] >> TCircularOperationQueueTest::ShouldTolerateInaccurateTimer [GOOD] >> TCircularQueueTest::ShouldNextSingleItem [GOOD] >> TCircularQueueTest::ShouldNextMulti [GOOD] >> TCircularQueueTest::ShouldGetQueue [GOOD] |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageDiskBlob::CreateIterate [GOOD] |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TULID::EveryBitOrder [GOOD] >> TFragmentedBufferTest::TestWriteRead [GOOD] >> TFragmentedBufferTest::TestOverwriteRead [GOOD] >> TFragmentedBufferTest::TestIsNotMonolith [GOOD] >> TFragmentedBufferTest::TestSetMonolith [GOOD] >> TFragmentedBufferTest::TestReplaceWithSetMonolith [GOOD] >> THazardTest::CachedPointers [GOOD] >> THazardTest::AutoProtectedPointers [GOOD] >> THyperLogCounterTest::TestGetSet [GOOD] >> THyperLogCounterTest::TestIncrement [GOOD] >> THyperLogCounterTest::TestAddRandom >> TIntervalSetTest::IntervalVecUnionInplace [GOOD] >> TIntervalSetTest::IntervalVecUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecIntersection >> TIntervalSetTest::IntervalVecIntersection [GOOD] >> TIntervalSetTest::IntervalSetTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestToStringAgainstReference [GOOD] >> 
TIntervalSetTest::IntervalMapUnion >> AddressClassifierTest::TestAddressExtraction [GOOD] >> AddressClassifierTest::TestAddressParsing [GOOD] >> AddressClassifierTest::TestClassfierWithAllIpTypes [GOOD] >> AddressClassifierTest::TestLabeledClassifier [GOOD] >> AddressClassifierTest::TestLabeledClassifierFromNetData >> TCacheCacheTest::Random [GOOD] >> TCacheTest::TestUnboundedMapCache [GOOD] >> TCacheTest::EnsureNoLeakAfterUnboundedCacheOnMapDtor [GOOD] >> TCacheTest::TestSizeBasedOverflowCallback [GOOD] >> TCacheTest::TestLruCache [GOOD] >> TCacheTest::EnsureNoLeakAfterLruCacheDtor [GOOD] >> TCacheTest::Test2QCache [GOOD] >> TCacheTest::EnsureNoLeakAfterQ2CacheDtor [GOOD] >> TCacheTest::TestUpdateItemSize [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight1 [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight2 [GOOD] >> TCircularOperationQueueTest::CheckOnDoneNotExisting [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotRunning [GOOD] >> TCircularOperationQueueTest::CheckRemoveRunning [GOOD] >> TCircularOperationQueueTest::CheckRemoveWaiting [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotExisting [GOOD] >> TCircularOperationQueueTest::CheckTimeout [GOOD] >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved [GOOD] >> TCircularOperationQueueTest::RemoveExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::BasicRPSCheck [GOOD] >> TCircularOperationQueueTest::BasicRPSCheckWithRound [GOOD] >> TCircularOperationQueueTest::CheckWakeupAfterStop [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted2 [GOOD] >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::BarrierIngressQuorumMirror3 [GOOD] |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |75.7%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut >> AddressClassifierTest::TestLabeledClassifierFromNetData [GOOD] >> TBitsTest::TestNaiveClz [GOOD] >> TBTreeTest::Basics [GOOD] >> TBTreeTest::ClearAndReuse >> TIntervalSetTest::IntervalVecIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecDifference >> THyperLogCounterTest::TestAddRandom [GOOD] >> THyperLogCounterTest::TestAddFixed |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc >> TBTreeTest::ClearAndReuse [GOOD] >> TBTreeTest::SeekForwardPermutationsInplace >> TBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TBTreeTest::SeekForwardPermutationsThreadSafe |75.7%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc >> TBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TCircleBufTest::SimpleTest [GOOD] >> TCircleBufTest::PtrTest [GOOD] >> TBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TLsnAllocTrackerTests::Test1 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse >> TBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TBTreeTest::RandomInsertInplace >> 
THyperLogCounterTest::TestAddFixed [GOOD] >> THyperLogCounterTest::TestHybridIncrement [GOOD] >> THyperLogCounterTest::TestHybridAdd [GOOD] >> TIntervalSetTest::IntervalMapTestEmpty [GOOD] >> TIntervalSetTest::IntervalMapTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAdd |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |75.8%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |75.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] Test command err: testing erasure none main# 0 main# 1 Checked 2 cases, took 25 us testing erasure block-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 1281913 us testing erasure mirror-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 512 cases, took 108827 us testing erasure block-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 446819 us testing erasure mirror-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 64 cases, took 38 us testing erasure block-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 2153201 us testing erasure stripe-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 604198 us >> TIntervalSetTest::IntervalVecDifference [GOOD] >> TIntervalSetTest::IntervalVecDifferenceInplaceSelf [GOOD] >> TIntrusiveFixedHashSetTest::TestEmptyFind [GOOD] >> TIntrusiveFixedHashSetTest::TestPushFindClear [GOOD] >> TIntrusiveHeapTest::TestEmpty [GOOD] >> TIntrusiveHeapTest::TestAddRemove [GOOD] >> TIntrusiveHeapTest::TestUpdateNoChange [GOOD] >> TIntrusiveHeapTest::TestUpdateIncrease [GOOD] >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> TVDiskConfigTest::JustConfig [GOOD] >> TVDiskConfigTest::Basic [GOOD] >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] >> TVDiskConfigTest::RtmrProblem1 >> TIntervalSetTest::IntervalMapTestAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestAddAgainstReference |75.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |75.8%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens >> TVDiskConfigTest::RtmrProblem1 [GOOD] >> TVDiskConfigTest::RtmrProblem2 [GOOD] 
>> TVDiskConfigTest::ThreeLevels [GOOD] >> TCircleBufStringStreamTest::TestNotAligned [GOOD] >> TCircleBufStringStreamTest::TestOverflow [GOOD] >> TCircleBufTest::EmptyTest [GOOD] >> TCircleBufTest::OverflowTest [GOOD] |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkDeletionWhileWriting [GOOD] |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |75.9%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCircularQueueTest::ShouldGetQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] Test command err: 0.27388 |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock >> TIntervalSetTest::IntervalMapTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference >> TPDiskErrorStateTests::Basic [GOOD] >> TPDiskErrorStateTests::Basic2 [GOOD] >> TPDiskErrorStateTests::BasicErrorReason [GOOD] |75.9%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference [GOOD] |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable >> TIntervalSetTest::IntervalMapIntersection >> TIntervalSetTest::IntervalMapUnion [GOOD] >> TIntervalSetTest::IntervalSetUnion |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |75.9%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable >> TCowBTreeTest::RandomInsertInplace [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe >> TTrackable::TVector [GOOD] >> TTrackable::TList [GOOD] >> TTrackable::TString [GOOD] |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |76.0%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::ThreeLevels [GOOD] |76.0%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |76.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufTest::OverflowTest [GOOD] >> TBlobStorageSyncNeighborsTest::CheckRevLookup [GOOD] >> TBlobStorageSyncNeighborsTest::CheckIsMyDomain [GOOD] >> TBlobStorageSyncNeighborsTest::CheckFailDomainsIterators [GOOD] |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TPDiskErrorStateTests::BasicErrorReason [GOOD] |76.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] |76.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |76.1%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TString [GOOD] >> TIntervalSetTest::IntervalSetUnion [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace >> TResizableCircleBufTest::Test1 [GOOD] >> TResizableCircleBufTest::Test2 [GOOD] >> TTrackable::TBuffer [GOOD] |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] >> TIntervalSetTest::IntervalMapIntersection [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TBuffer [GOOD] |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart [GOOD] >> TPDiskTest::TestChunkWriteCrossOwner >> TIntervalSetTest::IntervalMapUnionInplace [GOOD] >> TIntervalSetTest::IntervalSetUnionInplace >> TBlobStorageBarriersTreeTest::Tree [GOOD] |76.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest |76.2%| [TA] 
$(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TLsnMngrTests::AllocLsnForLocalUse2Threads [GOOD] >> Scheme::NonEmptyOwnedCellVec [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads >> ThrottlerControlTests::Overflow_2 >> TBTreeTest::RandomInsertInplace [GOOD] >> TBTreeTest::RandomInsertThreadSafe >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] >> ThrottlerControlTests::Overflow_2 [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalMapDifference |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::Tree [GOOD] |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |76.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_2 [GOOD] >> TBlobStorageHullFresh::SimpleBackwardEnd [GOOD] >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] |76.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::NonEmptyOwnedCellVec [GOOD] |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |76.4%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] >> TIntervalSetTest::IntervalSetUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetIntersection >> TIntervalSetTest::IntervalMapDifference [GOOD] >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TIntervalSetTest::IntervalSetIntersection [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] |76.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |76.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> 
TIntervalSetTest::IntervalSetIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetDifference >> TCowBTreeTest::MultipleSnapshots [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc >> PgTest::DumpStringCells |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |76.6%| [LD] {RESULT} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |76.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |76.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty |76.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] >> VDiskTest::HugeBlobWrite >> TIntervalSetTest::IntervalSetDifference [GOOD] >> TIntervalSetTest::IntervalSetDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetTestIterator [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe [GOOD] >> TCowBTreeTest::SnapshotCascade [GOOD] >> TCowBTreeTest::SnapshotRollback >> TBsOther1::PoisonPill [GOOD] >> TBsOther1::ChaoticParallelWrite >> TBlobStorageHullFresh::SimpleForward [GOOD] >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] |76.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads [GOOD] >> TOutOfSpaceStateTests::TestLocal [GOOD] >> TOutOfSpaceStateTests::TestGlobal [GOOD] |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations [GOOD] >> TStateStorageConfig::UniformityTest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |76.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageHullFreshSegment::PerfAppendix |76.7%| [LD] {RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |76.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TOutOfSpaceStateTests::TestGlobal [GOOD] >> TRegistryTests::TestAddGet [GOOD] >> TRegistryTests::TestCheckConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroQueueWeight [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> TBTreeTest::RandomInsertThreadSafe [GOOD] >> TBTreeTest::DuplicateKeysInplace |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |76.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client >> TSyncNeighborsTests::SerDes2 >> TSyncNeighborsTests::SerDes2 [GOOD] >> TBTreeTest::DuplicateKeysInplace [GOOD] >> TBTreeTest::DuplicateKeysThreadSafe |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |76.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |76.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] |76.8%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> TBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TBTreeTest::ShouldCallDtorsInplace >> TBTreeTest::ShouldCallDtorsInplace [GOOD] >> TBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TBTreeTest::Concurrent |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> ReadBatcher::Range [GOOD] >> TBlobStorageHullFresh::SolomonStandCrash [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TSyncNeighborsTests::SerDes3 [GOOD] >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |76.9%| [LD] {RESULT} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TCowBTreeTest::SnapshotRollback [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase >> TSyncNeighborsTests::SerDes1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |76.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBTreeTest::Concurrent [GOOD] >> TBTreeTest::IteratorDestructor [GOOD] >> TCacheCacheTest::MoveToWarm [GOOD] >> TCacheCacheTest::EvictNext [GOOD] >> CompressionTest::lz4_generator_basic [GOOD] >> CompressionTest::lz4_generator_deflates >> TBlobStorageSyncLogDsk::SeveralChunks [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_OnePageIndexed [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_SeveralPagesIndexed [GOOD] >> TBlobStorageSyncLogDsk::TrimLog [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts >> CompressionTest::lz4_generator_deflates [GOOD] >> StLog::Basic [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |76.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] |76.9%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::Range [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] >> TBlobStorageSyncLogMem::FilledIn1PutAfterSnapshot [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsPerf >> PersQueueCodecs::FromV1Codec [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::TrimLog [GOOD] >> RunLengthCodec::Random32 >> CodecsTest::Basic [GOOD] >> CodecsTest::NaturalNumbersAndZero [GOOD] >> CodecsTest::LargeAndRepeated [GOOD] >> NaiveFragmentWriterTest::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] Test command err: STEP 1 STEP 2 StringToId# 63 numItems# 110271 |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random32 >> VarLengthIntCodec::BasicTest64 [GOOD] >> VarLengthIntCodec::Random32 >> RunLengthCodec::Random32 [GOOD] >> RunLengthCodec::Random64 >> TCowBTreeTest::SnapshotRollbackEarlyErase [GOOD] >> TCowBTreeTest::ShouldCallDtorsInplace [GOOD] >> TCowBTreeTest::ShouldCallDtorsThreadSafe >> VarLengthIntCodec::Random32 [GOOD] >> VarLengthIntCodec::Random64 >> TCowBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TEventPriorityQueueTest::TestPriority [GOOD] >> TFastTlsTest::IterationAfterThreadDeath >> SemiSortedDeltaAndVarLengthCodec::Random32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random64 |77.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest >> PersQueueCodecs::FromV1Codec [GOOD] >> TFastTlsTest::IterationAfterThreadDeath [GOOD] >> TFastTlsTest::ManyThreadLocals >> RunLengthCodec::Random64 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] >> TFastTlsTest::ManyThreadLocals [GOOD] >> TFastTlsTest::ManyConcurrentKeys >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random64 [GOOD] >> SemiSortedDeltaCodec::BasicTest32 [GOOD] >> SemiSortedDeltaCodec::BasicTest64 [GOOD] >> 
TYardTest::TestChunkWriteReadWhole |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> NaiveFragmentWriterTest::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> StLog::Basic [GOOD] Test command err: Producer 0 worked for 0.3806722986 seconds Producer 1 worked for 0.1992060128 seconds Consumer 0 worked for 0.7749012132 seconds Consumer 1 worked for 0.8132901698 seconds Consumer 2 worked for 0.8065461852 seconds Consumer 3 worked for 0.5746245644 seconds >> VarLengthIntCodec::Random64 [GOOD] >> NameserviceConfigValidatorTests::TestRemoveTooMany [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] >> NaiveFragmentWriterTest::Long |77.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] >> ResourceBrokerConfigValidatorTests::TestMinConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoDefaultQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::BasicTest64 [GOOD] >> TBlobStorageSyncLogDsk::AddByOne >> TBlobStorageHullSstIt::TestSeekBefore [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndNext [GOOD] >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] >> TBlobStorageSyncLogDsk::AddByOne [GOOD] >> TBlobStorageSyncLogDsk::AddFive [GOOD] >> TBlobStorageSyncLogDsk::ComplicatedSerializeWithOverlapping [GOOD] >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] >> TopicNameConverterTest::Paths [GOOD] >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] |77.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::Random64 [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/simple_queue/simple_queue >> TFastTlsTest::ManyConcurrentKeys [GOOD] >> TFifoQueueTest::ShouldPushPop [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead2 [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead3 [GOOD] >> TFragmentedBufferTest::Test3WriteRead [GOOD] >> TFragmentedBufferTest::Test5WriteRead [GOOD] >> TFragmentedBufferTest::TestGetMonolith [GOOD] >> TFragmentedBufferTest::CopyFrom [GOOD] >> TFragmentedBufferTest::ReadWriteRandom >> NaiveFragmentWriterTest::Long [GOOD] >> ReorderCodecTest::Basic [GOOD] >> RunLengthCodec::BasicTest32 [GOOD] >> RunLengthCodec::BasicTest64 [GOOD] |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/simple_queue/simple_queue |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] |77.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |77.0%| [LD] {RESULT} $(B)/ydb/tools/simple_queue/simple_queue >> DiscoveryConverterTest::FullLegacyPath [GOOD] >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] |77.0%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] |77.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] >> TFreshAppendixTest::IterateForwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] >> TBlobStorageSyncLogData::SerializeParseEmpty1_Proto >> TStateStorageConfig::UniformityTest [GOOD] >> TBlobStorageSyncLogData::SerializeParseEmpty1_Proto [GOOD] >> TBlobStorageSyncLogData::SerializeParseEmpty2_Proto [GOOD] >> SemiSortedDeltaCodec::Random32 |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] >> SemiSortedDeltaCodec::Random32 [GOOD] >> SemiSortedDeltaCodec::Random64 |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> RunLengthCodec::BasicTest64 [GOOD] >> SemiSortedDeltaCodec::Random64 [GOOD] >> TActorTest::TestWaitFor [GOOD] |77.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] |77.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] >> TYardTest::TestChunkWriteReadWhole [GOOD] >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TStateStorageConfig::UniformityTest [GOOD] |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] >> TLogPriorityMuteTests::MuteUntilTest [GOOD] >> TLogPriorityMuteTests::AtomicMuteUntilTest [GOOD] >> TLogPriorityMuteTests::UnmuteTest [GOOD] >> TLogPriorityMuteTests::AtomicUnmuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteDurationTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteDurationTest [GOOD] >> TOneOneQueueTests::TestSimpleEnqueueDequeue [GOOD] >> TOneOneQueueTests::CleanInDestructor >> TBlobStorageSyncLogKeeper::CutLog_EntryPointNewFormat [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLog [GOOD] >> TBlobStorageSyncLogMem::FilledIn1 |77.1%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |77.1%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable >> TOneOneQueueTests::CleanInDestructor [GOOD] >> TOneOneQueueTests::ReadIterator [GOOD] 
>> TPageMapTest::TestResize [GOOD] >> TPageMapTest::TestRandom >> TBlobStorageSyncLogMem::FilledIn1 [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] >> TActorTest::TestSendFromAnotherThread >> TActorTest::TestDie [GOOD] >> TActorTest::TestFilteredGrab >> TActorTest::TestCreateChildActor |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::Random64 [GOOD] >> TActorTest::TestCreateChildActor [GOOD] >> TActorTest::TestBlockEvents >> TActorTest::TestFilteredGrab [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFor [GOOD] Test command err: ... waiting for value = 42 ... waiting for value = 42 (done) >> TActorTest::TestBlockEvents [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc >> TActorTest::TestSendEvent |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |77.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit >> TActorTest::TestSendEvent [GOOD] >> TActorTest::TestSendAfterDelay >> ThrottlerControlTests::Overflow_1 [GOOD] >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 >> TActorTest::TestStateSwitch |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] >> TActorTest::TestWaitFuture [GOOD] >> TActorTest::TestSendAfterDelay [GOOD] >> TActorTest::TestStateSwitch [GOOD] >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD] |77.3%| [TA] $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestFilteredGrab [GOOD] |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig >> TActorTest::TestWaitForFirstEvent >> TActorTest::TestSendFromAnotherThread [GOOD] >> TActorTest::TestHandleEvent >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap [GOOD] >> TYardTest::TestChunkWrite20Read02 |77.3%| [TA] $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.3%| [TA] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TActorTest::TestHandleEvent [GOOD] >> TActorTest::TestGetCtxTime >> TActorTest::TestWaitForFirstEvent [GOOD] |77.3%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig >> TActorTest::TestGetCtxTime [GOOD] |77.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest >> PersQueueCodecs::ToV1Codec [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestBlockEvents [GOOD] Test command err: ... waiting for blocked 3 events ... 
blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 3 events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event (done) ... waiting for processed 2 more events ... waiting for processed 2 more events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for processed 3 more events ... waiting for processed 3 more events (done) >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] |77.4%| [TA] {RESULT} $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_1 [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendAfterDelay [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound >> TActorTest::TestScheduleEvent |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestStateSwitch [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendFromAnotherThread [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] >> TActorTest::TestScheduleEvent [GOOD] >> TActorTest::TestScheduleReaction >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst >> TActorTest::TestScheduleReaction [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock [GOOD] >> TPDiskRaces::Decommit |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFuture [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestGetCtxTime [GOOD] >> TRegistryTests::TestLock [GOOD] >> TRegistryTests::TestClasses [GOOD] >> TRegistryTests::TestDisableEnable [GOOD] >> TYardTest::TestChunkWrite20Read02 [GOOD] >> TYardTest::TestChunkUnlock >> TBsVDiskRepl3::ReplPerf [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToLast [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] >> NameserviceConfigValidatorTests::TestLongWalleDC [GOOD] >> NameserviceConfigValidatorTests::TestModifyClusterUUID [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForAddrPort |77.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest >> PersQueueCodecs::ToV1Codec [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForAddrPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] >> TBlobStorageHullSstIt::TestSeekToFirst [GOOD] >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] >> TBlobStorageHullFreshSegment::PerfAppendix [GOOD] >> TBlobStorageHullFreshSegment::PerfSkipList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitForFirstEvent [GOOD] Test command err: ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... 
waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> TRegistryTests::TestDisableEnable [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex >> TYardTest::TestChunkUnlock [GOOD] >> TYardTest::TestChunkUnlockHarakiri |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestScheduleReaction [GOOD] |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |77.4%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] >> BootstrapTabletsValidatorTests::TestNoNodeForTablet [GOOD] >> BootstrapTabletsValidatorTests::TestRequiredTablet [GOOD] >> BootstrapTabletsValidatorTests::TestImportantTablet [GOOD] >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/olap_workload/olap_workload |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/olap_workload/olap_workload |77.5%| [LD] {RESULT} $(B)/ydb/tools/olap_workload/olap_workload >> TYardTest::TestChunkUnlockHarakiri [GOOD] >> TYardTest::TestChunkUnlockRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl3::ReplPerf [GOOD] Test command err: 2024-11-21T23:02:19.994999Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:02:20.023863Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 9270801616954868233] 2024-11-21T23:02:20.193388Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T23:02:32.797089Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:02:33.011402Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16075693668100032075] 2024-11-21T23:02:33.055549Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: 
THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2024-11-21T23:02:55.349341Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:02:55.590543Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10555992083922062368] 2024-11-21T23:02:56.717652Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |77.5%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a >> SamplingControlTests::Simple [GOOD] |77.4%| [AR] {RESULT} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |77.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] |77.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc [GOOD] >> TCowBTreeTest::DuplicateKeysInplace >> TFragmentedBufferTest::ReadWriteRandom [GOOD] >> TBlobStorageHullSstIt::TestSeekToLast [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] >> BootstrapTabletsValidatorTests::TestUnknownNodeForTablet [GOOD] >> NameserviceConfigValidatorTests::TestAddNewNode >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] >> TYardTest::TestChunkUnlockRestart [GOOD] >> TYardTest::TestChunkReserve >> NameserviceConfigValidatorTests::TestAddNewNode [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingHostPort [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsPerf [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsBuildSwapSnapshot [GOOD] >> VarLengthIntCodec::BasicTest32 [GOOD] |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::Simple [GOOD] |77.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest >> Metrics::CombineSubItems [GOOD] |77.5%| [TA] $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCowBTreeTest::DuplicateKeysInplace [GOOD] >> TCowBTreeTest::DuplicateKeysThreadSafe |77.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a >> TYardTest::TestChunkReserve [GOOD] >> Metrics::MoreThanFiveItems [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] |77.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest >> TYardTest::TestChunkRecommit |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice >> TQueryResultSizeTrackerTest::CheckAll >> TYardTest::TestChunkRecommit [GOOD] >> TYardTest::TestChunkRestartRecommit >> TQueryResultSizeTrackerTest::CheckAll [GOOD] >> Metrics::OnlyOneItem [GOOD] |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |77.5%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::CombineSubItems [GOOD] |77.6%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |77.6%| [LD] {RESULT} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::MoreThanFiveItems [GOOD] >> DSProxyStrategyTest::Restore_block42 |77.6%| [AR] {RESULT} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |77.6%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::BasicTest32 [GOOD] >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |77.6%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::OnlyOneItem [GOOD] |77.6%| [TA] {RESULT} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a >> TYardTest::TestChunkRestartRecommit [GOOD] >> TYardTest::TestDamagedFirstRecordToKeep >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize >> TCowBTreeTest::DuplicateKeysThreadSafe [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TCowBTreeTest::IteratorDestructor [GOOD] >> TCowBTreeTest::Concurrent >> NameserviceConfigValidatorTests::TestModifyIdForHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyResolveHost [GOOD] >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TFragmentedBufferTest::ReadWriteRandom [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> TCowBTreeTest::Concurrent [GOOD] >> TCowBTreeTest::Alignment [GOOD] >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/cfg/bin/ydb_configure |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/cfg/bin/ydb_configure |77.6%| [LD] {RESULT} $(B)/ydb/tools/cfg/bin/ydb_configure |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |77.7%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> TQueueBackpressureTest::PerfInFlight |77.7%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> TQueueBackpressureTest::CreateDelete [GOOD] |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |77.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TYardTest::TestSysLogReordering [GOOD] >> TYardTest::TestStartingPoints >> SysViewQueryHistory::TopReadBytesAdd >> SysViewQueryHistory::TopReadBytesAdd [GOOD] >> TQueueBackpressureTest::PerfTrivial |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> TBsOther1::ChaoticParallelWrite [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |77.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::CreateDelete [GOOD] |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCowBTreeTest::Alignment [GOOD] Test command err: Producer 0 worked for 0.2365092442 seconds Producer 1 worked for 0.2312320856 seconds Consumer 0 worked for 0.2532762399 seconds on a snapshot of size 20000 Consumer 1 worked for 0.1647604605 seconds on a snapshot of size 40000 Consumer 2 worked for 0.477180915 seconds on a snapshot of size 60000 Consumer 3 worked for 0.4262207131 seconds on a snapshot of size 80000 Consumers had 1199972 successful seeks >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] >> TQueueBackpressureTest::IncorrectMessageId >> TQueueBackpressureTest::IncorrectMessageId [GOOD] >> SysViewQueryHistory::TopDurationAdd [GOOD] |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopReadBytesAdd [GOOD] >> Scheme::YqlTypesMustBeDefined [GOOD] >> SchemeBorders::Partial [GOOD] >> SysViewQueryHistory::AddDedup [GOOD] >> SysViewQueryHistory::AddDedup2 [GOOD] |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] >> TYardTest::TestDamagedFirstRecordToKeep [GOOD] >> TYardTest::TestDamageAtTheBoundary |77.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> SysViewQueryHistory::AggrMergeDedup [GOOD] |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] >> TYardTest::TestStartingPoints [GOOD] >> TYardTest::TestWhiteboard |77.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> SchemeBorders::Partial [GOOD] |77.7%| [TA] $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SysViewQueryHistory::StableMerge2 [GOOD] >> DSProxyStrategyTest::Restore_mirror3dc |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::IncorrectMessageId [GOOD] |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> SchemeBorders::Full [GOOD] |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopDurationAdd [GOOD] |77.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::YqlTypesMustBeDefined [GOOD] |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMergeDedup [GOOD] >> Scheme::UnsafeAppend [GOOD] |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> TBsLocalRecovery::StartStopNotEmptyDB [GOOD] >> TBsLocalRecovery::WriteRestartRead |77.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedup2 [GOOD] >> Scheme::CellVecTryParse [GOOD] >> Scheme::CompareOrder [GOOD] |77.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge2 [GOOD] >> TYardTest::TestWhiteboard [GOOD] >> TYardTest::TestMultiYardStartingPoints >> TQueueBackpressureTest::PerfTrivial [GOOD] |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest >> TErasureTypeTest::TestStripe43LossOfAllPossible3 >> TErasureTypeTest::TestBlock31LossOfAllPossible1 |77.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> SchemeBorders::Full [GOOD] >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step >> TQueueBackpressureTest::PerfInFlight [GOOD] |77.8%| [TA] {RESULT} $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |77.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::UnsafeAppend [GOOD] |77.8%| [TA] $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/statistics_workload/statistics_workload |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/statistics_workload/statistics_workload |77.8%| [TA] {RESULT} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Metrics::SeveralSubItems [GOOD] |77.8%| [LD] {RESULT} $(B)/ydb/tools/statistics_workload/statistics_workload |77.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::CompareOrder [GOOD] |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfTrivial [GOOD] >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] |77.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest >> TYardTest::TestMultiYardStartingPoints [GOOD] >> TYardTest::TestSysLogOverwrite |77.8%| [TA] $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfInFlight [GOOD] >> Metrics::SeveralTopItems [GOOD] >> Metrics::EmptyIssuesList [GOOD] |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::SeveralSubItems [GOOD] |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] >> SamplingControlTests::EdgeCaseUpper [GOOD] >> TErasureTypeTest::TestBlock32LossOfAllPossible2 >> ValidationTests::AdvancedCopyTo [GOOD] >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] >> ThrottlerControlTests::LongIdle [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::SeveralTopItems [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::EmptyIssuesList [GOOD] |77.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/codecs/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/metrics/ut/unittest |77.9%| [TA] $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::AdvancedCopyTo [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseUpper [GOOD] |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/dqrun >> ValidationTests::HasReservedPaths [GOOD] |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD] >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step >> ThrottlerControlTests::Simple [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::LongIdle [GOOD] >> ValidationTests::MapType [GOOD] >> ValidationTests::CanCopyTo |77.9%| [TA] {RESULT} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.9%| [TA] {RESULT} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] >> ValidationTests::CanCopyTo [GOOD] >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest >> TErasureTypeTest::TestStripe31LossOfAllPossible1 |77.9%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |77.9%| [TA] $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.9%| [TA] $(B)/ydb/core/persqueue/codecs/ut/test-results/unittest/{meta.json ... results_accumulator.log} |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::HasReservedPaths [GOOD] >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::MapType [GOOD] |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Simple [GOOD] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] >> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] >> JsonEnvelopeTest::Simple [GOOD] >> JsonEnvelopeTest::NoReplace >> TYardTest::TestSysLogOverwrite [GOOD] >> TYardTest::TestUpsAndDownsAtTheBoundary |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanCopyTo [GOOD] >> JsonEnvelopeTest::NoReplace [GOOD] >> JsonEnvelopeTest::ArrayItem [GOOD] >> JsonEnvelopeTest::Escape [GOOD] >> JsonEnvelopeTest::BinaryData [GOOD] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD] |78.0%| [TA] $(B)/ydb/core/fq/libs/metrics/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ValidationTests::CanDispatchByTag [GOOD] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest >> TPageMapTest::TestRandom [GOOD] >> TPageMapTest::TestIntrusive [GOOD] >> TPageMapTest::TestSimplePointer [GOOD] >> TPageMapTest::TestSharedPointer [GOOD] >> TPageMapTest::TestSimplePointerFull |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD] >> TPageMapTest::TestSimplePointerFull [GOOD] >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanDispatchByTag [GOOD] |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/log_backend/ut/unittest >> JsonEnvelopeTest::BinaryData [GOOD] >> OldFormat::SameVersion [GOOD] >> OldFormat::DefaultRules [GOOD] >> OldFormat::PrevYear [GOOD] >> OldFormat::Trunk [GOOD] >> OldFormat::UnexpectedTrunk [GOOD] >> OldFormat::TooOld [GOOD] >> OldFormat::OldNbs [GOOD] >> VersionParser::Basic [GOOD] >> YdbVersion::DefaultSameVersion [GOOD] >> YdbVersion::DefaultPrevMajor [GOOD] >> YdbVersion::DefaultNextMajor [GOOD] >> YdbVersion::DefaultHotfix [GOOD] >> YdbVersion::DefaultCompatible [GOOD] >> YdbVersion::DefaultNextYear [GOOD] >> YdbVersion::DefaultPrevYear [GOOD] >> YdbVersion::DefaultNewMajor [GOOD] >> YdbVersion::DefaultOldMajor [GOOD] >> YdbVersion::DefaultDifferentBuild [GOOD] >> YdbVersion::DefaultDifferentBuildIncompatible [GOOD] >> YdbVersion::LimitOld [GOOD] >> YdbVersion::LimitNew [GOOD] >> YdbVersion::CurrentCanLoadFrom [GOOD] >> YdbVersion::CurrentCanLoadFromAllOlder [GOOD] >> YdbVersion::CurrentCanLoadFromIncompatible [GOOD] >> YdbVersion::CurrentStoresReadableBy [GOOD] >> YdbVersion::StoredReadableBy [GOOD] >> YdbVersion::StoredReadableByIncompatible [GOOD] >> YdbVersion::StoredWithRules [GOOD] >> YdbVersion::StoredWithRulesIncompatible [GOOD] >> YdbVersion::OldNbsStored [GOOD] >> YdbVersion::OldNbsIncompatibleStored [GOOD] >> YdbVersion::NewNbsCurrent [GOOD] >> YdbVersion::NewNbsIncompatibleCurrent [GOOD] >> YdbVersion::OneAcceptedVersion [GOOD] >> YdbVersion::ForbiddenMinor [GOOD] >> YdbVersion::DefaultRulesWithExtraForbidden [GOOD] >> YdbVersion::ExtraAndForbidden [GOOD] >> YdbVersion::SomeRulesAndOtherForbidden [GOOD] >> YdbVersion::Component [GOOD] >> YdbVersion::OtherComponent [GOOD] >> YdbVersion::YDBAndNbs [GOOD] >> YdbVersion::DifferentYdbVersionsWithNBSRules [GOOD] >> YdbVersion::TrunkYDBAndNbs [GOOD] >> YdbVersion::TrunkAndStable [GOOD] >> YdbVersion::CompatibleWithSelf [GOOD] >> YdbVersion::PrintCurrentVersionProto [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] Test command err: 2024-11-21T23:03:01.366777Z :BS_SYNCLOG ERROR: VDISK[0:_:0:0:0]: Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:3:1] targetVDisk# [0:1:0:0:0] 2024-11-21T23:03:01.366824Z :BS_SYNCLOG ERROR: VDISK[0:_:0:0:0]: Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:2:0] targetVDisk# [0:1:0:0:0] 2024-11-21T23:03:01.366874Z :BS_SYNCLOG ERROR: VDISK[0:_:0:0:0]: Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:1:1] targetVDisk# [0:1:0:0:0] 2024-11-21T23:03:01.366904Z :BS_SYNCLOG ERROR: 
VDISK[0:_:0:0:0]: Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:3:0] targetVDisk# [0:1:0:0:0] 2024-11-21T23:03:01.366932Z :BS_SYNCLOG ERROR: VDISK[0:_:0:0:0]: Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:2:1] targetVDisk# [0:1:0:0:0] 2024-11-21T23:03:01.366969Z :BS_SYNCLOG ERROR: VDISK[0:_:0:0:0]: Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:1:0] targetVDisk# [0:1:0:0:0] 2024-11-21T23:03:01.366998Z :BS_SYNCLOG ERROR: VDISK[0:_:0:0:0]: Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:0:1] targetVDisk# [0:1:0:0:0] >> Mvp::TokenatorGetMetadataTokenGood >> Mvp::TokenatorGetMetadataTokenGood [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/driver_lib/version/ut/unittest >> YdbVersion::PrintCurrentVersionProto [GOOD] Test command err: Application: "ydb" |78.0%| [TA] $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ClosedIntervalSet::Union |78.0%| [TA] $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TPDiskRaces::Decommit [GOOD] >> TPDiskRaces::DecommitWithInflight >> TBsLocalRecovery::WriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartRead >> TFunctionsMetadataTest::Serialization >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 >> TYardTest::TestDamageAtTheBoundary [GOOD] >> TYardTest::TestCutMultipleLogChunks >> Backpressure::MonteCarlo >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD] >> MdbEndpoingGenerator::Legacy [GOOD] >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] >> TFunctionsMetadataTest::Serialization [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] >> TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] >> TestS3UrlEscape::EscapeEscapedForce [GOOD] >> TestS3UrlEscape::EscapeUnescapeForceRet [GOOD] >> ArrowInferenceTest::csv_simple >> ArrowInferenceTest::csv_simple [GOOD] >> ArrowInferenceTest::tsv_simple [GOOD] >> TArrowPushDown::SimplePushDown [GOOD] >> TArrowPushDown::FilterEverything [GOOD] >> TArrowPushDown::MatchSeveralRowGroups >> ExternalDataSourceTest::ValidateName [GOOD] >> ExternalDataSourceTest::ValidatePack [GOOD] >> ExternalDataSourceTest::ValidateAuth [GOOD] >> ExternalDataSourceTest::ValidateParameters [GOOD] >> ExternalDataSourceTest::ValidateHasExternalTable [GOOD] >> ExternalDataSourceTest::ValidateProperties [GOOD] >> ExternalDataSourceTest::ValidateLocation >> TArrowPushDown::MatchSeveralRowGroups [GOOD] |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/metadata/ut/unittest >> TFunctionsMetadataTest::Serialization [GOOD] >> ExternalDataSourceTest::ValidateLocation [GOOD] >> ObjectStorageTest::SuccessValidation [GOOD] >> ObjectStorageTest::FailedCreate [GOOD] >> ObjectStorageTest::FailedValidation [GOOD] >> ObjectStorageTest::FailedJsonListValidation [GOOD] >> ObjectStorageTest::WildcardsValidation [GOOD] |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD] |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] >> 
TCollectingS3ListingStrategyTests::IfNoIssuesOccursShouldReturnCollectedPaths [GOOD] >> TCollectingS3ListingStrategyTests::IfThereAreMoreRecordsThanSpecifiedByLimitShouldReturnError [GOOD] >> TCollectingS3ListingStrategyTests::IfAnyIterationReturnIssueThanWholeStrategyShouldReturnIt [GOOD] >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] >> ArrowTest::BatchBuilder >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/common/ut/unittest >> TestS3UrlEscape::EscapeUnescapeForceRet [GOOD] >> ParseStats::ParseWithSources [GOOD] >> ParseStats::ParseJustOutput [GOOD] >> ParseStats::ParseMultipleGraphsV1 >> ParseStats::ParseMultipleGraphsV1 [GOOD] >> ParseStats::ParseMultipleGraphsV2 [GOOD] >> ArrowTest::BatchBuilder [GOOD] >> ArrowTest::ArrowToYdbConverter |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/external_sources/object_storage/inference/ut/gtest >> ArrowInferenceTest::tsv_simple [GOOD] |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/test_connection/ut/unittest |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/external_sources/ut/unittest >> ObjectStorageTest::WildcardsValidation [GOOD] >> ArrowTest::ArrowToYdbConverter [GOOD] >> ArrowTest::KeyComparison [GOOD] >> ArrowTest::SortWithCompositeKey [GOOD] >> ArrowTest::MergingSortedInputStream >> ConsoleDumper::Basic [GOOD] >> ConsoleDumper::CoupleMerge >> ArrowTest::MergingSortedInputStream [GOOD] >> ArrowTest::MergingSortedInputStreamReversed [GOOD] >> ArrowTest::MergingSortedInputStreamReplace [GOOD] >> ColumnFilter::MergeFilters [GOOD] >> ColumnFilter::CombineFilters [GOOD] >> Dictionary::Simple >> ConsoleDumper::CoupleMerge [GOOD] >> ConsoleDumper::CoupleOverwrite [GOOD] >> ConsoleDumper::CoupleMergeOverwriteRepeated >> TYardTest::TestCutMultipleLogChunks [GOOD] >> TYardTest::TestChunkPriorityBlock >> GroupStress::Test [GOOD] |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/actors/ut/unittest >> TArrowPushDown::MatchSeveralRowGroups [GOOD] >> ConsoleDumper::CoupleMergeOverwriteRepeated [GOOD] >> ConsoleDumper::ReverseMerge [GOOD] >> ConsoleDumper::ReverseOverwrite [GOOD] >> ConsoleDumper::ReverseMergeOverwriteRepeated [GOOD] >> ConsoleDumper::Different |78.0%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/provider/ut/unittest >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] >> ConsoleDumper::Different [GOOD] >> ConsoleDumper::SimpleNode [GOOD] >> ConsoleDumper::JoinSimilar >> ConsoleDumper::JoinSimilar [GOOD] >> ConsoleDumper::DontJoinDifferent [GOOD] >> ConsoleDumper::SimpleTenant [GOOD] >> ConsoleDumper::SimpleNodeTenant [GOOD] >> ConsoleDumper::SimpleHostId >> ConsoleDumper::SimpleHostId [GOOD] >> ConsoleDumper::SimpleNodeId [GOOD] >> ConsoleDumper::DontJoinNodeTenant [GOOD] >> ConsoleDumper::JoinMultipleSimple [GOOD] >> ConsoleDumper::MergeNode [GOOD] >> ConsoleDumper::MergeOverwriteRepeatedNode [GOOD] >> ConsoleDumper::Ordering ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/core/ut/unittest >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] Test command err: 2024-11-21T23:03:22.109469Z :MVP DEBUG: Refreshing token metadataTokenName 2024-11-21T23:03:22.109761Z :MVP DEBUG: Updating metadata token 2024-11-21T23:03:22.158888Z :MVP DEBUG: Refreshing token metadataTokenName 2024-11-21T23:03:22.159166Z :MVP DEBUG: Updating metadata token 
2024-11-21T23:03:27.161956Z :MVP DEBUG: Refreshing token metadataTokenName 2024-11-21T23:03:27.163403Z :MVP DEBUG: Updating metadata token >> FormatCSV::Instants [GOOD] >> FormatCSV::EmptyData [GOOD] >> FormatCSV::Common >> ConsoleDumper::Ordering [GOOD] >> ConsoleDumper::IgnoreUnmanagedItems [GOOD] >> YamlConfig::CollectLabels >> YamlConfig::CollectLabels [GOOD] >> YamlConfig::MaterializeSpecificConfig [GOOD] >> YamlConfig::MaterializeAllConfigSimple >> YamlConfig::MaterializeAllConfigSimple [GOOD] >> YamlConfig::MaterializeAllConfigs >> FormatCSV::Common [GOOD] >> FormatCSV::Strings [GOOD] >> FormatCSV::Nulls >> FormatCSV::Nulls [GOOD] >> Mirror3of4::ReplicationSmall |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest >> ParseStats::ParseMultipleGraphsV2 [GOOD] |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_group/unittest >> GroupStress::Test [GOOD] >> YamlConfig::MaterializeAllConfigs [GOOD] >> YamlConfig::AppendVolatileConfig [GOOD] >> YamlConfig::AppendAndResolve >> YamlConfig::AppendAndResolve [GOOD] >> YamlConfig::GetMetadata [GOOD] >> YamlConfig::ReplaceMetadata [GOOD] >> YamlConfigParser::Iterate [GOOD] >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 [GOOD] >> YamlConfigParser::PdiskCategoryFromString [GOOD] >> YamlConfigParser::AllowDefaultHostConfigId >> YamlConfigParser::AllowDefaultHostConfigId [GOOD] >> YamlConfigParser::IncorrectHostConfigIdFails [GOOD] >> YamlConfigParser::NoMixedHostConfigIds >> YamlConfigParser::NoMixedHostConfigIds [GOOD] >> YamlConfigProto2Yaml::StorageConfig [GOOD] |78.1%| [TA] $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> PushdownTest::NoFilter ------- [TS] {asan, default-linux-x86_64, release} ydb/core/io_formats/arrow/ut/unittest >> FormatCSV::Nulls [GOOD] Test command err: 12000000 Cannot read CSV: no columns specified Cannot read CSV: Invalid: Empty CSV file d'Artagnan '"' Jeanne d'Arc "'" 'd'Artagnan' ''"'' 'Jeanne d'Arc' '"'"' d'Artagnan '"' Jeanne d'Arc "'" src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: \N,"","" \N,"\N","\N" \N,\N,\N parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,\N,\N ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: NULL,"","" NULL,"NULL","NULL" NULL,NULL,NULL parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,NULL,NULL ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ >> PushdownTest::NoFilter [GOOD] >> ActionParsingTest::ToAndFromStringAreConsistent [GOOD] >> ActionParsingTest::ActionsForQueueTest [GOOD] >> ActionParsingTest::BatchActionTest [GOOD] >> ActionParsingTest::ActionsForMessageTest [GOOD] >> ActionParsingTest::FastActionsTest [GOOD] >> HttpCountersTest::CountersAggregationTest [GOOD] >> LazyCounterTest::LazyCounterTest >> TYardTest::TestChunkPriorityBlock [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut/unittest >> YamlConfigProto2Yaml::StorageConfig [GOOD] Test command err: host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" "\/dev\/disk\/by-partlabel\/kikimr_nvme_02" host_config: 
"[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" host_configs: - host_config_id: 1 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: NVME expected_slot_count: 9 - path: /dev/disk/by-partlabel/kikimr_nvme_02 type: NVME expected_slot_count: 9 - host_config_id: 2 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: SSD expected_slot_count: 9 hosts: - host: sas8-6954.search.yandex.net port: 19000 host_config_id: 1 - host: sas8-6955.search.yandex.net port: 19000 host_config_id: 2 item_config_generation: 0 >> TYardTest::TestUpsAndDownsAtTheBoundary [GOOD] >> TYardTest::TestUnflushedChunk >> LazyCounterTest::LazyCounterTest [GOOD] >> LazyCounterTest::AggregationLazyTest [GOOD] >> LazyCounterTest::AggregationNonLazyTest >> LazyCounterTest::AggregationNonLazyTest [GOOD] >> LazyCounterTest::HistogramAggregationTest [GOOD] >> MessageAttributeValidationTest::MessageAttributeValidationTest [GOOD] >> MessageBodyValidationTest::MessageBodyValidationTest [GOOD] >> MeteringCountersTest::CountersAggregationTest [GOOD] >> NameValidationTest::NameValidationTest [GOOD] >> QueueAttributes::BasicStdTest [GOOD] >> QueueAttributes::BasicFifoTest [GOOD] >> QueueAttributes::BasicClampTest >> PushdownTest::Equal >> QueueAttributes::BasicClampTest [GOOD] >> QueueCountersTest::InsertCountersTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithoutFolderTest >> QueueCountersTest::RemoveQueueCountersLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithFolderTest >> PushdownTest::Equal [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithFolderTest [GOOD] >> QueueCountersTest::CountersAggregationTest >> Mvp::OpenIdConnectRequestWithIamTokenYandex >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] >> QueueCountersTest::CountersAggregationTest [GOOD] >> QueueCountersTest::CountersAggregationCloudTest [GOOD] >> RedrivePolicy::RedrivePolicyValidationTest [GOOD] >> RedrivePolicy::RedrivePolicyToJsonTest >> RedrivePolicy::RedrivePolicyToJsonTest [GOOD] >> RedrivePolicy::RedrivePolicyArnValidationTest [GOOD] >> SecureProtobufPrinterTest::MessageBody >> Mvp::OpenIdConnectRequestWithIamTokenYandex [GOOD] >> Mvp::OpenIdConnectRequestWithIamTokenNebius [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex >> SecureProtobufPrinterTest::MessageBody [GOOD] >> SecureProtobufPrinterTest::Tokens [GOOD] >> StringValidationTest::IsAlphaNumAndPunctuationTest [GOOD] >> UserCountersTest::DisableCountersTest >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodNebius >> UserCountersTest::DisableCountersTest [GOOD] >> UserCountersTest::RemoveUserCountersTest [GOOD] >> UserCountersTest::CountersAggregationTest >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieYandex >> PushdownTest::NotEqualInt32Int64 >> UserCountersTest::CountersAggregationTest [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieYandex [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius >> PushdownTest::NotEqualInt32Int64 [GOOD] >> 
Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius [GOOD] >> Mvp::OpenIdConnectProxyOnHttpsHost |78.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] >> TBsDbStat::ChaoticParallelWrite_DbStat [GOOD] >> TBsHuge::Simple >> PushdownTest::TrueCoalesce [GOOD] >> Mvp::OpenIdConnectProxyOnHttpsHost [GOOD] >> Mvp::OpenIdConnectFixLocationHeader |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkPriorityBlock [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge >> PushdownTest::CmpInt16AndInt32 [GOOD] >> Mvp::OpenIdConnectFixLocationHeader [GOOD] >> Mvp::OpenIdConnectExchangeNebius >> Mvp::OpenIdConnectExchangeNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail >> PushdownTest::PartialAnd |78.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/base/ut/unittest >> UserCountersTest::CountersAggregationTest [GOOD] >> PushdownTest::PartialAnd [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlow >> Mvp::OpenIdConnectFullAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlowAjax >> PushdownTest::PartialAndOneBranchPushdownable >> PushdownTest::PartialAndOneBranchPushdownable [GOOD] >> integrations_test.py::test_read_jtest_results[o/OK] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] >> integrations_test.py::test_read_jtest_with_one_result [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlow >> PushdownTest::NotNull >> PushdownTest::NotNull [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax >> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] >> TBsVDiskOutOfSpace::WriteUntilOrangeZone [GOOD] >> TBsVDiskOutOfSpace::WriteUntilYellowZone >> PushdownTest::NotNullForDatetime [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid >> PushdownTest::IsNull [GOOD] >> PushdownTest::StringFieldsNotSupported [GOOD] >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax >> PushdownTest::StringFieldsNotSupported2 [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed >> TBsLocalRecovery::MultiPutWriteRestartReadHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX >> TYardTest::TestUnflushedChunk [GOOD] >> TYardTest::TestRedZoneSurvivability ------- [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: 
/home/runner/.ya/build/build_root/dl5p/000d60/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/dl5p/000d60/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) >> TBsHuge::Simple [GOOD] >> TBsHuge::SimpleErasureNone >> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed [GOOD] >> Mvp::OpenIdConnectAllowedHostsList >> Mvp::OpenIdConnectAllowedHostsList [GOOD] >> Mvp::OpenIdConnectHandleNullResponseFromProtectedResource [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie >> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie >> TBlobStorageHullFresh::AppendixPerf [GOOD] >> TBlobStorageHullFresh::AppendixPerf_Tune >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> PushdownTest::StringFieldsNotSupported2 [GOOD] Test command err: Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (Bool '"true") $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) 2024-11-21 23:03:30.962 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:30.991 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:30.997 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_generic_io_discovery.cpp:55: discovered cluster name: test_cluster 2024-11-21 23:03:31.005 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_generic_load_meta.cpp:83: Loading table meta for: `test_cluster`.`test_table` 2024-11-21 23:03:31.016 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! 
world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:31.048 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:31.053 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:31.061 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (Bool '"true")) (let $2 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($5) $1))) (let $3 (DataSink '"result")) (let $4 (ResWrite! (Left! $2) $3 (Key) (FlatMap (Right! $2) (lambda '($6) (OptionalIf $1 $6))) '('('type)))) (return (Commit! $4 $3)) ) 2024-11-21 23:03:31.066 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_co_simple1.cpp:978: OptionalIf over Bool 'true 2024-11-21 23:03:31.069 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:31.070 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:31.072 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:31.073 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_co_simple1.cpp:2031: FlatMap with Just 2024-11-21 23:03:31.075 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! 
world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:31.076 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:31.095 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_optimize.cpp:135: PhysicalOptimizer-TrimReadWorld 2024-11-21 23:03:31.097 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) 2024-11-21 23:03:31.098 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) 2024-11-21 23:03:31.116 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [RESULT] yql_result_provider.cpp:771: ResPull 2024-11-21 23:03:31.118 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) 2024-11-21 23:03:31.121 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) 2024-11-21 23:03:31.125 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! 
$2 $1)) ) 2024-11-21 23:03:31.128 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_generic_dq_integration.cpp:131: Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42 2024-11-21 23:03:31.153 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_optimize.cpp:135: BuildGenericDqSourceSettings 2024-11-21 23:03:31.155 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Built settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($32) (Bool '"true")))) (let $4 (DataType 'Bool)) (let $5 (DataType 'Date)) (let $6 (DataType 'Datetime)) (let $7 (DataType 'Double)) (let $8 (DataType 'DyNumber)) (let $9 (DataType 'Float)) (let $10 (DataType 'Int16)) (let $11 (DataType 'Int32)) (let $12 (DataType 'Int64)) (let $13 (DataType 'Int8)) (let $14 (DataType 'Interval)) (let $15 (DataType 'Json)) (let $16 (DataType 'JsonDocument)) (let $17 (DataType 'String)) (let $18 (DataType 'Timestamp)) (let $19 (DataType 'TzDate)) (let $20 (DataType 'TzDatetime)) (let $21 (DataType 'TzTimestamp)) (let $22 (DataType 'Uint16)) (let $23 (DataType 'Uint32)) (let $24 (DataType 'Uint64)) (let $25 (DataType 'Uint8)) (let $26 (DataType 'Utf8)) (let $27 (DataType 'Uuid)) (let $28 (DataType 'Yson)) (let $29 (StructType '('"col_bool" $4) '('"col_date" $5) '('"col_datetime" $6) '('"col_double" $7) '('"col_dynumber" $8) '('"col_float" $9) '('"col_int16" $10) '('"col_int32" $11) '('"col_int64" $12) '('"col_int8" $13) '('"col_interval" $14) '('"col_json" $15) '('"col_json_document" $16) '('"col_optional_bool" (OptionalType $4)) '('"col_optional_date" (OptionalType $5)) '('"col_optional_datetime" (OptionalType $6)) '('"col_optional_double" (OptionalType $7)) '('"col_optional_dynumber" (OptionalType $8)) '('"col_optional_float" (OptionalType $9)) '('"col_optional_int16" (OptionalType $10)) '('"col_optional_int32" (OptionalType $11)) '('"col_optional_int64" (OptionalType $12)) '('"col_optional_int8" (OptionalType $13)) '('"col_optional_interval" (OptionalType $14)) '('"col_optional_json" (OptionalType $15)) '('"col_optional_json_document" (OptionalType $16)) '('"col_optional_string" (OptionalType $17)) '('"col_optional_timestamp" (OptionalType $18)) '('"col_optional_tz_date" (OptionalType $19)) '('"col_optional_tz_datetime" (OptionalType $20)) '('"col_optional_tz_timestamp" (OptionalType $21)) 
'('"col_optional_uint16" (OptionalType $22) ... optional_date" (OptionalType $5)) '('"col_optional_datetime" (OptionalType $6)) '('"col_optional_double" (OptionalType $7)) '('"col_optional_dynumber" (OptionalType $8)) '('"col_optional_float" (OptionalType $9)) '('"col_optional_int16" (OptionalType $10)) '('"col_optional_int32" (OptionalType $11)) '('"col_optional_int64" (OptionalType $12)) '('"col_optional_int8" (OptionalType $13)) '('"col_optional_interval" (OptionalType $14)) '('"col_optional_json" (OptionalType $15)) '('"col_optional_json_document" (OptionalType $16)) '('"col_optional_string" (OptionalType $17)) '('"col_optional_timestamp" (OptionalType $18)) '('"col_optional_tz_date" (OptionalType $19)) '('"col_optional_tz_datetime" (OptionalType $20)) '('"col_optional_tz_timestamp" (OptionalType $21)) '('"col_optional_uint16" (OptionalType $22)) '('"col_optional_uint32" (OptionalType $23)) '('"col_optional_uint64" (OptionalType $24)) '('"col_optional_uint8" (OptionalType $25)) '('"col_optional_utf8" (OptionalType $26)) '('"col_optional_uuid" (OptionalType $27)) '('"col_optional_yson" (OptionalType $28)) '('"col_string" $17) '('"col_timestamp" $18) '('"col_tz_date" $19) '('"col_tz_datetime" $20) '('"col_tz_timestamp" $21) '('"col_uint16" $22) '('"col_uint32" $23) '('"col_uint64" $24) '('"col_uint8" $25) '('"col_utf8" $26) '('"col_uuid" $27) '('"col_yson" $28))) (let $30 (DqSourceWrap $3 (DataSource '"generic" '"test_cluster") $29)) (let $31 (ResWrite! world $1 (Key) (FlatMap $30 (lambda '($33) (OptionalIf (Coalesce (== (Member $33 '"col_utf8") (Member $33 '"col_optional_utf8")) (Bool '"false")) $33))) '('('type)))) (return (Commit! $31 $1)) ) Dq source filter settings: 2024-11-21 23:03:36.767 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_generic_settings.cpp:38: GenericConfiguration::AddCluster: name = test_cluster, kind = POSTGRESQL, database name = database, database id = , endpoint = { host: "host" port: 42 }, use tls = 0, protocol = NATIVE Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (!= (Member $row '"col_string") (String '"value") ) $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) 2024-11-21 23:03:36.769 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:36.771 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! 
$1) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:36.772 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_generic_io_discovery.cpp:55: discovered cluster name: test_cluster 2024-11-21 23:03:36.772 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_generic_load_meta.cpp:83: Loading table meta for: `test_cluster`.`test_table` 2024-11-21 23:03:36.774 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:36.776 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:36.776 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:36.777 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2024-11-21 23:03:36.779 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_optimize.cpp:135: PhysicalOptimizer-TrimReadWorld 2024-11-21 23:03:36.779 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($3) (Bool '"true")))) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! $2 $1)) ) 2024-11-21 23:03:36.780 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! 
world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($3) (Bool '"true")))) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! $2 $1)) ) 2024-11-21 23:03:36.781 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") '"test_table" (Void) (lambda '($3) (Bool '"true")))) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! $2 $1)) ) 2024-11-21 23:03:36.782 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_generic_dq_integration.cpp:131: Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42 2024-11-21 23:03:36.791 INFO yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [generic] yql_optimize.cpp:135: BuildGenericDqSourceSettings 2024-11-21 23:03:36.793 DEBUG yql-providers-generic-provider-ut-pushdown(pid=37987, tid=0x00007F901538ABC0) [core] yql_out_transformers.cpp:62: Built settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($32) (Bool '"true")))) (let $4 (DataType 'Bool)) (let $5 (DataType 'Date)) (let $6 (DataType 'Datetime)) (let $7 (DataType 'Double)) (let $8 (DataType 'DyNumber)) (let $9 (DataType 'Float)) (let $10 (DataType 'Int16)) (let $11 (DataType 'Int32)) (let $12 (DataType 'Int64)) (let $13 (DataType 'Int8)) (let $14 (DataType 'Interval)) (let $15 (DataType 'Json)) (let $16 (DataType 'JsonDocument)) (let $17 (DataType 'String)) (let $18 (DataType 'Timestamp)) (let $19 (DataType 'TzDate)) (let $20 (DataType 'TzDatetime)) (let $21 (DataType 'TzTimestamp)) (let $22 (DataType 'Uint16)) (let $23 (DataType 'Uint32)) (let $24 (DataType 'Uint64)) (let $25 (DataType 'Uint8)) (let $26 (DataType 'Utf8)) (let $27 (DataType 'Uuid)) (let $28 (DataType 'Yson)) (let $29 (StructType '('"col_bool" $4) '('"col_date" $5) '('"col_datetime" $6) '('"col_double" $7) '('"col_dynumber" $8) '('"col_float" $9) '('"col_int16" $10) '('"col_int32" $11) '('"col_int64" $12) '('"col_int8" $13) '('"col_interval" $14) '('"col_json" $15) '('"col_json_document" $16) '('"col_optional_bool" (OptionalType $4)) '('"col_optional_date" (OptionalType $5)) '('"col_optional_datetime" (OptionalType $6)) 
'('"col_optional_double" (OptionalType $7)) '('"col_optional_dynumber" (OptionalType $8)) '('"col_optional_float" (OptionalType $9)) '('"col_optional_int16" (OptionalType $10)) '('"col_optional_int32" (OptionalType $11)) '('"col_optional_int64" (OptionalType $12)) '('"col_optional_int8" (OptionalType $13)) '('"col_optional_interval" (OptionalType $14)) '('"col_optional_json" (OptionalType $15)) '('"col_optional_json_document" (OptionalType $16)) '('"col_optional_string" (OptionalType $17)) '('"col_optional_timestamp" (OptionalType $18)) '('"col_optional_tz_date" (OptionalType $19)) '('"col_optional_tz_datetime" (OptionalType $20)) '('"col_optional_tz_timestamp" (OptionalType $21)) '('"col_optional_uint16" (OptionalType $22)) '('"col_optional_uint32" (OptionalType $23)) '('"col_optional_uint64" (OptionalType $24)) '('"col_optional_uint8" (OptionalType $25)) '('"col_optional_utf8" (OptionalType $26)) '('"col_optional_uuid" (OptionalType $27)) '('"col_optional_yson" (OptionalType $28)) '('"col_string" $17) '('"col_timestamp" $18) '('"col_tz_date" $19) '('"col_tz_datetime" $20) '('"col_tz_timestamp" $21) '('"col_uint16" $22) '('"col_uint32" $23) '('"col_uint64" $24) '('"col_uint8" $25) '('"col_utf8" $26) '('"col_uuid" $27) '('"col_yson" $28))) (let $30 (DqSourceWrap $3 (DataSource '"generic" '"test_cluster") $29)) (let $31 (ResWrite! world $1 (Key) (FlatMap $30 (lambda '($33) (OptionalIf (!= (Member $33 '"col_string") (String '"value")) $33))) '('('type)))) (return (Commit! $31 $1)) ) Dq source filter settings: >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] |78.1%| [TS] {asan, default-linux-x86_64, release} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/mrrun/mrrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__free_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__memalign_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__realloc_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'nanosleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'on_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open' failed: symbol not defined ld.lld: warning: version 
script assignment of 'global' to symbol 'open64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_broadcast' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_timedlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_once' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_rdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedwrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_tryrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_trywrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_wrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_init' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'pthread_spin_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'raise' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__res_iclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rmdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signalfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsuspend' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socket' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'unlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'usleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bcopy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dladdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'forkpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' 
to symbol 'gcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrusage' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbrtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memccpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'openpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_key_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'putenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'stpcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'swprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vswprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcschr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_internal' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol '__wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemset' failed: symbol not defined |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/mrrun/mrrun |78.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/codecs/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/metrics/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.1%| [TS] {RESULT} ydb/core/client/metadata/ut/unittest |78.1%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test |78.1%| [TS] {RESULT} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest |78.1%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test |78.1%| [LD] {RESULT} $(B)/ydb/library/yql/tools/mrrun/mrrun |78.1%| [TS] {RESULT} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest |78.1%| [TS] {RESULT} ydb/mvp/core/ut/unittest |78.1%| [TS] {RESULT} ydb/core/io_formats/arrow/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/oidc_proxy/ut/unittest >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD] Test command err: 2024-11-21T23:03:31.967810Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:31.968479Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T23:03:32.116367Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:32.116577Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T23:03:32.192840Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:32.193065Z :MVP DEBUG: Incoming response for protected resource: 204 2024-11-21T23:03:32.344342Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:32.344503Z :MVP DEBUG: Incoming response for protected resource: 204 2024-11-21T23:03:32.471291Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:32.473020Z :MVP DEBUG: Incoming response for protected resource: 204 2024-11-21T23:03:32.605055Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:32.606021Z :MVP DEBUG: Incoming response for protected resource: 204 2024-11-21T23:03:32.930473Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:32.933191Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:32.940462Z :MVP DEBUG: Incoming response for protected resource: 400 2024-11-21T23:03:32.940601Z :MVP DEBUG: Try to send request to HTTPS port 2024-11-21T23:03:32.940775Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:32.947874Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T23:03:33.069881Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:33.069964Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:33.070212Z :MVP DEBUG: Incoming response for protected resource: 400 2024-11-21T23:03:33.580605Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:33.580956Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:33.582617Z :MVP DEBUG: Incoming response for protected resource: 307 2024-11-21T23:03:33.664307Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:33.664665Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:33.667900Z :MVP DEBUG: Incoming response for protected resource: 302 2024-11-21T23:03:33.715704Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:33.715778Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:33.716939Z :MVP DEBUG: Incoming response for protected resource: 302 2024-11-21T23:03:33.751481Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:33.751830Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:33.754967Z :MVP DEBUG: Incoming response for protected resource: 302 2024-11-21T23:03:33.806550Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:33.806886Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:33.808474Z :MVP DEBUG: Incoming response for protected resource: 302 2024-11-21T23:03:33.861810Z :MVP DEBUG: Start OIDC process 
2024-11-21T23:03:33.887477Z :MVP DEBUG: Using session cookie (__Host_session_cookie_79632E6F617574682E7964622D766965776572: c2Vz****aWU= (CE0CB168)) 2024-11-21T23:03:33.887972Z :MVP DEBUG: Exchange session token 2024-11-21T23:03:33.888325Z :MVP DEBUG: Getting access token: 200 2024-11-21T23:03:33.888406Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:33.888577Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T23:03:34.366650Z :MVP DEBUG: SessionService.Check(): 401 2024-11-21T23:03:34.906512Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T23:03:34.914292Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T23:03:34.978501Z :MVP DEBUG: SessionService.Create(): OK 2024-11-21T23:03:35.012942Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:35.013223Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:35.014185Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T23:03:35.552303Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T23:03:35.556001Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T23:03:35.566470Z :MVP DEBUG: SessionService.Create(): OK 2024-11-21T23:03:35.581763Z :MVP DEBUG: SessionService.Check(): OK 2024-11-21T23:03:35.581830Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:35.582064Z :MVP DEBUG: Incoming response for protected resource: 200 2024-11-21T23:03:35.641510Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2024-11-21T23:03:36.040927Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2024-11-21T23:03:36.321278Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T23:03:36.426474Z :MVP DEBUG: SessionService.Create(): 401 2024-11-21T23:03:36.489934Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T23:03:36.660649Z :MVP DEBUG: SessionService.Create(): 400 2024-11-21T23:03:36.736814Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T23:03:36.927601Z :MVP DEBUG: SessionService.Create(): 400 2024-11-21T23:03:36.986085Z :MVP DEBUG: Incoming response from authorization server: 200 2024-11-21T23:03:37.130480Z :MVP DEBUG: SessionService.Create(): 412 2024-11-21T23:03:37.368268Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T23:03:37.375149Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T23:03:37.385962Z :MVP DEBUG: SessionService.Check(): 400 2024-11-21T23:03:37.426625Z :MVP DEBUG: Forward user request bypass OIDC 2024-11-21T23:03:37.426851Z :MVP DEBUG: Can not process request to protected resource: GET /counters HTTP/1.1 Host: ydb.viewer.page Accept: */* Accept-Encoding: deflate Authorization: 2024-11-21T23:03:37.484991Z :MVP DEBUG: Restore oidc context failed: Cannot find cookie ydb_oidc_cookie 2024-11-21T23:03:37.636852Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest |78.1%| [TS] {RESULT} ydb/core/blobstorage/vdisk/defrag/ut/unittest |78.2%| [TS] {RESULT} ydb/core/ymq/base/ut/unittest |78.2%| [TS] {RESULT} ydb/library/yaml_config/ut/unittest |78.2%| [TM] {RESULT} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest |78.2%| [TM] {RESULT} ydb/core/blobstorage/ut_group/unittest |78.2%| [TA] {RESULT} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.2%| [TS] {RESULT} ydb/mvp/oidc_proxy/ut/unittest |78.2%| [TS] {RESULT} ydb/library/yql/providers/s3/common/ut/unittest |78.2%| [TS] {RESULT} ydb/core/fq/libs/test_connection/ut/unittest |78.2%| [TS] {RESULT} ydb/library/yql/providers/s3/actors/ut/unittest |78.2%| [TS] {RESULT} ydb/library/yql/providers/s3/provider/ut/unittest |78.2%| [TS] {RESULT} ydb/core/external_sources/ut/unittest |78.2%| [TS] {RESULT} ydb/core/external_sources/object_storage/inference/ut/gtest |78.2%| [TS] {RESULT} ydb/core/log_backend/ut/unittest |78.2%| [TM] {RESULT} ydb/core/driver_lib/version/ut/unittest |78.2%| [TA] {RESULT} $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.2%| [TA] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsHuge::SimpleErasureNone [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |78.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> TYardTest::TestRedZoneSurvivability [GOOD] >> TYardTest::TestSlay >> TPDiskRaces::DecommitWithInflight [GOOD] >> TPDiskRaces::DecommitWithInflightMock >> ClosedIntervalSet::Union [GOOD] >> ClosedIntervalSet::Difference >> TYardTest::TestSlay [GOOD] >> TYardTest::TestSlayRace >> TYardTest::TestSlayRace [GOOD] |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TYardTest::TestSlayRecreate |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |78.2%| [TS] {RESULT} ydb/tests/library/ut/py3test |78.2%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TYardTest::TestSlayRecreate [GOOD] >> TYardTest::TestSlayLogWriteRaceActor >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] |78.2%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |78.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |78.2%| [LD] {RESULT} $(B)/ydb/library/mkql_proto/ut/ydb-library-mkql_proto-ut |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |78.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |78.3%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased >> test.py::test[solomon-BadDownsamplingFill-] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] >> TMiniKQLProtoTest::TestExportUuidType >> TMiniKQLProtoTest::TestExportEmptyListType >> TCacheTest::Recreate >> TMiniKQLProtoTest::TestExportVoidType >> TMiniKQLProtoTest::TestExportUuidType [GOOD] >> TMiniKQLProtoTest::TestExportVariantTupleType >> TMiniKQLProtoTest::TestCanExport >> TMiniKQLProtoTest::TestExportTupleType >> TMiniKQLProtoTest::TestExportDecimalType >> TMiniKQLProtoTest::TestExportEmptyListType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyDictType >> TMiniKQLProtoTest::TestExportVoidType [GOOD] >> TMiniKQLProtoTest::TestExportVoid >> TMiniKQLProtoTest::TestExportTupleType [GOOD] >> TMiniKQLProtoTest::TestExportStructType >> TMiniKQLProtoTest::TestExportVariantTupleType [GOOD] >> TMiniKQLProtoTest::TestExportVariantStructType >> TMiniKQLProtoTest::TestExportListType >> TMiniKQLProtoTest::TestExportDecimalType [GOOD] >> TMiniKQLProtoTest::TestExportDictType >> TCacheTest::Navigate >> TMiniKQLProtoTest::TestExportNullType >> TMiniKQLProtoTest::TestExportEmptyDictType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyOptional >> TMiniKQLProtoTest::TestExportVoid [GOOD] >> TMiniKQLProtoTest::TestImportDecimal >> TMiniKQLProtoTest::TestExportVariantStructType [GOOD] >> TMiniKQLProtoTest::TestExportUuid >> TCacheTest::List >> TCacheTest::MigrationCommon >> TCacheTestWithDrops::LookupErrorUponEviction >> TMiniKQLProtoTest::TestExportListType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyTupleType >> TCacheTest::MigrationLostMessage >> TMiniKQLProtoTest::TestExportStructType [GOOD] >> TMiniKQLProtoTest::TestExportTuple >> TMiniKQLProtoTest::TestCanExport [GOOD] >> TMiniKQLProtoTest::TestExportDataType >> TMiniKQLProtoTest::TestExportTuple [GOOD] >> TMiniKQLProtoTest::TestExportStructEmptyColumnOrder >> TMiniKQLProtoTest::TestExportDictType [GOOD] >> TMiniKQLProtoTest::TestExportDouble >> TMiniKQLProtoTest::TestExportNullType [GOOD] >> TMiniKQLProtoTest::TestExportOptionalType >> TPDiskRaces::DecommitWithInflightMock [GOOD] >> TPDiskRaces::KillOwnerWhileDecommitting >> TMiniKQLProtoTest::TestExportDecimalNegative >> TPDiskUtil::DriveEstimator [GOOD] >> TPDiskUtil::OffsetParsingCorrectness >> TMiniKQLProtoTest::TestExportStructEmptyColumnOrder [GOOD] >> TMiniKQLProtoTest::TestExportStructWithColumnOrder >> TMiniKQLProtoTest::TestExportUuid [GOOD] >> TMiniKQLProtoTest::TestExportVariant >> TMiniKQLProtoTest::TestExportEmptyOptional [GOOD] >> TMiniKQLProtoTest::TestExportEmptyOptionalOptional >> TMiniKQLProtoTest::TestExportStructWithColumnOrder [GOOD] >> TMiniKQLProtoTest::TestImportDecimal [GOOD] >> TMiniKQLProtoTest::TestImportVariant >> TMiniKQLProtoTest::TestExportDataType [GOOD] >> TMiniKQLProtoTest::TestExportBool >> TCacheTest::RacyCreateAndSync >> TMiniKQLProtoTest::TestExportDouble [GOOD] >> TMiniKQLProtoTest::TestExportEmptyDict >> TPDiskUtil::OffsetParsingCorrectness [GOOD] >> TMiniKQLProtoTest::TestExportOptionalType [GOOD] >> TMiniKQLProtoTest::TestExportOptionalOptionalType >> TMiniKQLProtoTest::TestExportVariant [GOOD] >> TMiniKQLProtoTest::TestImportVariant [GOOD] >> TMiniKQLProtoTest::TestImportUuid >> TPDiskUtil::FormatSectorMap >> 
TCacheTest::Recreate [GOOD] >> TCacheTest::RacyRecreateAndSync >> TMiniKQLProtoTest::TestExportEmptyTupleType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyStructType >> TCacheTest::Attributes >> TCacheTest::TableSchemaVersion >> TMiniKQLProtoTest::TestExportDecimalNegative [GOOD] >> TMiniKQLProtoTest::TestExportDecimalMax64bit >> TMiniKQLProtoTest::TestImportUuid [GOOD] >> TMiniKQLProtoTest::TestExportOptionalOptionalType [GOOD] >> TMiniKQLProtoTest::TestExportEmptyOptionalOptional [GOOD] >> TMiniKQLProtoTest::TestExportEmptyList >> TMiniKQLProtoTest::TestExportNull >> TMiniKQLProtoTest::TestExportEmptyDict [GOOD] >> TMiniKQLProtoTest::TestExportBool [GOOD] >> TMiniKQLProtoTest::TestExportDecimal >> TMiniKQLProtoTest::TestExportPgType >> TMiniKQLProtoTest::TestExportDict >> TCacheTest::SystemView >> TMiniKQLProtoTest::TestExportDecimalMax64bit [GOOD] >> TMiniKQLProtoTest::TestExportDecimalHugePlusOne >> TPDiskUtil::FormatSectorMap [GOOD] >> TMiniKQLProtoTest::TestExportDict [GOOD] >> TMiniKQLProtoTest::TestExportEmptyStructType [GOOD] >> TMiniKQLProtoTest::TestExportIntegral >> TMiniKQLProtoTest::TestExportDecimalHugePlusOne [GOOD] >> TMiniKQLProtoTest::TestExportDecimalNan >> TMiniKQLProtoTest::TestExportEmptyList [GOOD] >> TMiniKQLProtoTest::TestExportNull [GOOD] >> TMiniKQLProtoTest::TestExportOptional >> TMiniKQLProtoTest::TestExportDecimal [GOOD] >> TMiniKQLProtoTest::TestExportDecimalHuge >> TMiniKQLProtoTest::TestExportPgType [GOOD] >> TMiniKQLProtoTest::TestExportOptionalType2 >> TMiniKQLProtoTest::TestExportOptional [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestImportUuid [GOOD] >> TMiniKQLProtoTest::TestExportIntegral [GOOD] >> TMiniKQLProtoTest::TestExportList |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportStructWithColumnOrder [GOOD] >> TMiniKQLProtoTest::TestExportDecimalNan [GOOD] >> TMiniKQLProtoTest::TestExportDecimalMunusInf >> TCacheTest::List [GOOD] >> TCacheTest::CheckSystemViewAccess >> TMiniKQLProtoTest::TestExportOptionalType2 [GOOD] >> TMiniKQLProtoTest::TestExportString |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> TMiniKQLProtoTest::TestExportDecimalHuge [GOOD] |78.3%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.3%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> TCacheTest::Navigate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskUtil::FormatSectorMap [GOOD] Test command err: Path# /home/runner/.ya/build/build_root/dl5p/001a46/r3tmp/tmpFfENgb//pdisk/data.bin >> TCacheTest::MigrationUndo >> TMiniKQLProtoTest::TestExportList [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportVariant [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportEmptyList [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportDict [GOOD] >> TMiniKQLProtoTest::TestExportString [GOOD] >> TMiniKQLProtoTest::TestExportStruct >> TCacheTest::RacyRecreateAndSync [GOOD] >> TMiniKQLProtoTest::TestExportDecimalMunusInf [GOOD] >> TCacheTest::SystemView [GOOD] >> TCacheTest::SysLocks >> TMiniKQLProtoTest::TestExportStruct [GOOD] >> TMiniKQLProtoTest::TestExportStructColumnOrderAffectsTopLevelOnly >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationCommit >> TMiniKQLProtoTest::TestExportStructColumnOrderAffectsTopLevelOnly [GOOD] >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::PathBelongsToDomain |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportOptional [GOOD] >> TCacheTest::CheckSystemViewAccess [GOOD] >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess >> TCacheTest::SysLocks [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportDecimalHuge [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportList [GOOD] >> TCacheTest::TableSchemaVersion [GOOD] >> TCacheTest::WatchRoot |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportStructColumnOrderAffectsTopLevelOnly [GOOD] >> TCacheTest::CheckAccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyRecreateAndSync [GOOD] Test command err: 2024-11-21T23:04:07.621088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:07.621289Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:08.328603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:04:08.397509Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T23:04:08.416584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:04:08.426023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:04:08.476915Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2024-11-21T23:04:11.476974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:11.477311Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:11.825426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:04:11.859894Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T23:04:11.863383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:04:11.918070Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait 
txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:04:11.999035Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationDeletedPathNavigate >> TCacheTest::PathBelongsToDomain [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/mkql_proto/ut/unittest >> TMiniKQLProtoTest::TestExportDecimalMunusInf [GOOD] >> TCacheTest::MigrationCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2024-11-21T23:04:11.137722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:11.137875Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:11.949592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2024-11-21T23:04:13.612470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:13.612524Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:13.824721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TCacheTest::WatchRoot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2024-11-21T23:04:09.273621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:09.273823Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:10.344909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2024-11-21T23:04:10.804955Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T23:04:10.832789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:04:10.833434Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T23:04:12.780638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:12.780707Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:13.007808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2024-11-21T23:04:13.012641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:04:13.025858Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T23:04:13.034439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2024-11-21T23:04:13.069247Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:202:2193], for# user1@builtin, access# DescribeSchema 2024-11-21T23:04:13.084522Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:208:2199], for# user1@builtin, 
access# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] Test command err: 2024-11-21T23:04:12.189366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:12.189730Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:13.008531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:04:13.101070Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2024-11-21T23:04:14.283249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:14.283298Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:14.430225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T23:04:14.466081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2024-11-21T23:04:14.481115Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:194:2185], for# user1@builtin, access# DescribeSchema 2024-11-21T23:04:14.481543Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:198:2189], for# user1@builtin, access# DescribeSchema >> TCacheTest::MigrationUndo [GOOD] |78.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |78.4%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> TYardTest::TestSlayLogWriteRaceActor [GOOD] >> TYardTest::TestStartingPointReboots >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2024-11-21T23:04:11.200567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:11.200622Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:12.389354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:04:12.596371Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2024-11-21T23:04:14.448128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:14.448362Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:14.758216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2024-11-21T23:04:14.789454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:04:14.803514Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 2024-11-21T23:04:14.837590Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:223:2202], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:04:14.838208Z node 2 
:TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:225:2204], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2024-11-21T23:04:08.804463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:08.804532Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:09.285418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2024-11-21T23:04:09.312573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:171:2168], Recipient [1:68:2107]: NActors::TEvents::TEvPoison 2024-11-21T23:04:09.313216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:172:2067] recipient: [1:45:2092] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:175:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:176:2067] recipient: [1:174:2169] Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:178:2067] recipient: [1:174:2169] 2024-11-21T23:04:09.328154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:04:09.382489Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:04:09.383221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:174:2169], Recipient [1:177:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:04:09.412172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:09.412479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:09.412557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:09.412660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:09.412784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:09.412895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:09.413071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:09.414936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T23:04:09.452214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:09.453382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:09.453578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:09.453770Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:177:2170]: TSystem::Undelivered 2024-11-21T23:04:09.453811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2024-11-21T23:04:09.453841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:09.453895Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:09.454123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:09.454867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:09.454974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.455056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.455436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.455642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.455759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.455845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.455944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.456031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.456157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.456467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.456587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.456941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.457017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.457169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.457250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.457337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.457520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.457597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 
72057594046678944 2024-11-21T23:04:09.457728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.457945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.458064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.458113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.458154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.458565Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:04:09.459925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:04:09.460574Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [1:177:2170], Recipient [1:177:2170]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T23:04:09.460612Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T23:04:09.461207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:09.461253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:09.461442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:09.461543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:09.461587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:09.461616Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:04:09.461746Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:192:2170], Recipient [1:177:2170]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T23:04:09.461781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T23:04:09.461816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:207:2067] recipient: [1:24:2071] 2024-11-21T23:04:09.487094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:206:2187], Recipient [1:177:2170]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2024-11-21T23:04:09.487140Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:04:09.767771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:09.770329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 
2024-11-21T23:04:09.772098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:04:09.772651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T23:04:09.773684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:04:09.774554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:09.774705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:04:09.775129Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:04:09.775398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:09.775542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:04:09.777079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678 ... 4-11-21T23:04:12.107682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2024-11-21T23:04:12.108097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:04:12.108278Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:04:12.109025Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:415:2333], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:04:12.109768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:04:12.110264Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 { Path: Root/USER_0/DirA 
TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:04:13.828395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:13.828456Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:14.057499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:174:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:175:2169] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:178:2067] recipient: [2:175:2169] 2024-11-21T23:04:14.210513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:14.210575Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2024-11-21T23:04:14.260219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T23:04:14.280644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:245:2067] recipient: [2:239:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:245:2067] recipient: [2:239:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:246:2217] sender: 
[2:249:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:254:2067] recipient: [2:239:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2024-11-21T23:04:14.383533Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:285:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:286:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2024-11-21T23:04:14.432821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:335:2067] recipient: [2:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:335:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:338:2286] sender: [2:339:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [2:338:2286] sender: [2:340:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T23:04:14.713046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:413:2067] recipient: [2:408:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:413:2067] recipient: [2:408:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:416:2333] sender: [2:417:2067] recipient: [2:408:2329] 2024-11-21T23:04:14.901501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:14.901565Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:416:2333] sender: [2:443:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] 
Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 106 2024-11-21T23:04:15.031652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:04:15.031725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:04:15.032080Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:15.032217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:04:15.075197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2024-11-21T23:04:15.075575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:503:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:506:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:508:2067] recipient: [2:507:2404] Leader for TabletID 72057594046678944 is [2:509:2405] sender: [2:510:2067] recipient: [2:507:2404] 2024-11-21T23:04:15.289144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:15.289205Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:509:2405] sender: [2:536:2067] recipient: [2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::WatchRoot [GOOD] Test command err: 2024-11-21T23:04:11.197373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:11.197431Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:12.928444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2024-11-21T23:04:13.072961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T23:04:14.086875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 2024-11-21T23:04:15.313023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:15.313087Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:15.443625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:04:15.479074Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T23:04:15.481246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: 
minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:04:15.504649Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] |78.4%| [TA] $(B)/ydb/library/mkql_proto/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2024-11-21T23:04:09.970003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:09.970109Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:11.257783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:04:11.353872Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2024-11-21T23:04:13.063859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:13.064030Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:13.366733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2024-11-21T23:04:13.386570Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [2:171:2168], Recipient [2:68:2107]: NActors::TEvents::TEvPoison 2024-11-21T23:04:13.387277Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:174:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:175:2169] Leader for 
TabletID 72057594046678944 is [2:177:2170] sender: [2:178:2067] recipient: [2:175:2169] 2024-11-21T23:04:13.400174Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [2:175:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:04:13.460144Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [2:175:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:04:13.460664Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [2:175:2169], Recipient [2:177:2170]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:04:13.499176Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:13.499551Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:13.499739Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:13.500037Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:13.500469Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:13.500497Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:13.500872Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:13.504947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:13.753797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:13.775652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:13.775859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:13.777234Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [2:7238242728502259555:7369577], Recipient [2:177:2170]: TSystem::Undelivered 2024-11-21T23:04:13.777272Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2024-11-21T23:04:13.777392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:13.777509Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:13.778190Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:13.795900Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:13.796271Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.796445Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.797893Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.799052Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.799538Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read 
records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.799902Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.800564Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.800989Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.801604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.822982Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.823543Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.830987Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.831196Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.832021Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.832334Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.832584Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.833635Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.833910Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.834250Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.834799Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.834918Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.835255Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.835470Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:13.836422Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:04:13.856153Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:04:13.866492Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [2:177:2170], Recipient [2:177:2170]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T23:04:13.866554Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T23:04:13.868629Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:13.868904Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:13.869281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:13.869397Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:13.869521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:13.869655Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:04:13.890578Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:192:2170], Recipient [2:177:2170]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T23:04:13.890658Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T23:04:13.890807Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2024-11-21T23:04:13.920732Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [2:206:2187], Recipient [2:177:2170]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2024-11-21T23:04:13.920917Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:04:14.362980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:14.363232Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:04:14.363330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:04:14.363499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target pa ... 
ype: Coordinator, at schemeshard: 72057594046678944 2024-11-21T23:04:16.691800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:04:16.691828Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: Mediator, at schemeshard: 72057594046678944 2024-11-21T23:04:16.691847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:04:16.691876Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2024-11-21T23:04:16.691899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T23:04:16.698579Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:16.698786Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.698958Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2024-11-21T23:04:16.699227Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.699334Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.699652Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.699740Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.699947Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700075Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700147Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700290Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700361Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700511Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700689Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700781Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700821Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.700914Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.701154Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:04:16.712851Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:04:16.718188Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435079, Sender [2:508:2398], Recipient [2:508:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2024-11-21T23:04:16.745509Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2024-11-21T23:04:16.746276Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:16.746341Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:16.746591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:16.746655Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:16.746694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:16.746740Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:04:16.746883Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:523:2398], Recipient [2:508:2398]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T23:04:16.746914Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2024-11-21T23:04:16.746948Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:16.760727Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:04:16.761644Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: [2:375:2315] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:04:16.779212Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:535:2415], recipient# [2:534:2414], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } 
ServerlessComputeResourcesMode: (empty maybe) } }] } { Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:04:16.780476Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:04:16.780603Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: [2:384:2318] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:04:16.780784Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:537:2417], recipient# [2:536:2416], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:04:16.781169Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:157:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:04:16.781290Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:157:2155], cacheItem# { Subscriber: { Subscriber: 
[2:393:2321] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 250 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:04:16.781480Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:539:2419], recipient# [2:538:2418], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) } }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] Test command err: 2024-11-21T23:03:36.614996Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:823:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615028Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:579:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615055Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:619:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615105Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:521:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615124Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:98:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615165Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:546:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615189Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:472:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615225Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:764:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615258Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:958:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.615281Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:672:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616583Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:274:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616618Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:779:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616636Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:371:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616654Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:652:0:0:66560:1] Marker# BSVS08 
2024-11-21T23:03:36.616678Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:502:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616707Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:36:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616746Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:585:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616762Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:837:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616781Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:720:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.616802Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:118:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617788Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:584:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617818Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:511:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617834Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:171:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617855Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:983:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617871Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:890:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617896Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:30:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617916Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:551:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617950Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:614:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.617971Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:74:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.618002Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:395:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.618904Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:346:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.618934Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:361:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.618954Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:244:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.618978Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:531:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.619004Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:6:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.619029Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:512:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.619050Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:580:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.619076Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:147:0:0:66560:1] Marker# BSVS08 
2024-11-21T23:03:36.619095Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:794:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.619149Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:847:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.622813Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:963:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.622850Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:613:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.622870Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:594:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.622888Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:997:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.622917Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:711:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.622946Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:861:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.622966Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:157:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.622993Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:205:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623021Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:16:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623069Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:137:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623642Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:852:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623673Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:133:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623705Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:482:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623725Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:516:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623740Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:64:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623761Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:1:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623788Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:750:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623822Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:973:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623853Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:113:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.623876Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:681:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.624581Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:832:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.624607Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:439:0:0:66560:1] Marker# BSVS08 
2024-11-21T23:03:36.624638Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:492:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.624668Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:842:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.624690Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:618:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.624726Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:21:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625042Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:322:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625075Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:682:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625093Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:225:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625116Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:89:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625637Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:370:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625658Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:69:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625676Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:162:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625696Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:308:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625724Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:929:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625758Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:895:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625778Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:419:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625797Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:409:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625817Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:108:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.625838Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:706:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626516Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:176:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626538Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:390:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626586Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:341:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626608Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:915:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626625Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:404:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626645Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:857:0:0:66560:1] Marker# BSVS08 
2024-11-21T23:03:36.626664Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:234:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626715Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:31:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626744Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:866:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.626784Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:609:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627245Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:667:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627269Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:900:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627290Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:541:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627306Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:161:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627322Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:701:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627352Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:871:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627379Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:725:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627405Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:774:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627420Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:905:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.627445Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:939:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628139Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:45:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628158Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:210:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628176Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:337:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628194Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:200:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628213Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:448:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628231Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:239:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628248Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:434:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628285Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:954:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628306Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:298:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.628328Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:949:0:0:66560:1] Marker# BSVS08 
2024-11-21T23:03:36.629585Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:103:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629619Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:264:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629640Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:506:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629656Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:696:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629675Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:438:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629696Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:424:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629726Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:716:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629748Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:648:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629767Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:191:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.629800Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:229:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630621Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:376:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630652Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:891:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630673Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:65:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630692Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:784:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630712Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:380:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630743Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:924:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630764Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:653:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630790Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:195:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630815Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:862:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.630837Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:920:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.631355Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:99:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.631380Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:463:0:0:66560:1] Marker# BSVS08 2024-11-21T23:03:36.631407Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:278:0:0:66560:1] Marker# BSVS08 |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2024-11-21T23:04:09.122053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:09.122207Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:10.446000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] Test command err: testing erasure block-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 71 us testing erasure stripe-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 1249847 us testing erasure block-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 3672069 us testing erasure stripe-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 992173 us testing erasure stripe-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 3505022 us testing erasure stripe-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 7723201 us >> TNetClassifierTest::TestInitFromFile >> TNetClassifierTest::TestInitFromBadlyFormattedFile |78.4%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> TProxyActorTest::TestCreateSemaphore |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession >> TProxyActorTest::TestCreateSemaphore [GOOD] |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TPDiskRaces::KillOwnerWhileDecommitting [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] >> TProxyActorTest::TestAttachSession [GOOD] |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/formats/arrow/ut/unittest >> Dictionary::Simple 2024-11-21 23:04:25,773 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2024-11-21 23:04:25,932 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 37286 75.5M 75.6M 21.6M test_tool run_ut @/home/runner/.ya/build/build_root/dl5p/000e88/ydb/core/formats/arrow/ut/test-results/unittest/testing_out_stuff/test_tool.args 37566 260M 261M 204M └─ ydb-core-formats-arrow-ut --trace-path-append /home/runner/.ya/build/build_root/dl5p/000e88/ydb/core/formats/arrow/ut/test-results/unittest/ytest.report.trace +ArrowTest: Test command err: Process: Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7480276291/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/dl5p/000e88/ydb/core/formats/arrow/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7480276291/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/dl5p/000e88/ydb/core/formats/arrow/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |78.5%| [TA] {RESULT} $(B)/ydb/library/mkql_proto/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.5%| [TS] {RESULT} ydb/core/formats/arrow/ut/unittest |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] >> TCacheTest::MigrationDeletedPathNavigate [GOOD] |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] |78.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2024-11-21T23:04:10.721101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:10.721147Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:11.909731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:172:2067] recipient: [1:45:2092] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:175:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:68:2107] sender: [1:176:2067] recipient: [1:174:2169] Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:178:2067] recipient: [1:174:2169] 2024-11-21T23:04:12.502822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:12.503055Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:177:2170] sender: [1:207:2067] recipient: [1:24:2071] 2024-11-21T23:04:12.619196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T23:04:12.635062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:243:2067] recipient: [1:234:2211] IGNORE Leader for 
TabletID 72075186233409546 is [0:0:0] sender: [1:243:2067] recipient: [1:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:244:2067] recipient: [1:236:2213] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:244:2067] recipient: [1:236:2213] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:245:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:245:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:248:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:248:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409546 is [1:247:2217] sender: [1:250:2067] recipient: [1:234:2211] Leader for TabletID 72075186233409547 is [1:251:2219] sender: [1:252:2067] recipient: [1:236:2213] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2024-11-21T23:04:12.778359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:247:2217] sender: [1:285:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:251:2219] sender: [1:286:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2024-11-21T23:04:12.947176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:335:2067] recipient: [1:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:335:2067] recipient: [1:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:336:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:336:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:338:2286] sender: [1:339:2067] recipient: [1:331:2282] Leader for TabletID 72075186233409548 is [1:338:2286] sender: [1:340:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T23:04:13.770191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:412:2067] recipient: [1:408:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:412:2067] recipient: [1:408:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:413:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:413:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:415:2333] sender: [1:416:2067] recipient: [1:408:2329] 
Leader for TabletID 72075186233409549 is [1:415:2333] sender: [1:417:2067] recipient: [1:24:2071] 2024-11-21T23:04:14.084068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:14.084120Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 106 2024-11-21T23:04:14.137045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:04:14.137093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:04:14.137437Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:14.137549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:04:14.153809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2024-11-21T23:04:14.153998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T23:04:14.223533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait 
until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2024-11-21T23:04:14.421768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:613:2067] recipient: [1:608:2496] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:613:2067] recipient: [1:608:2496] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:615:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:615:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:616:2500] sender: [1:617:2067] recipient: [1:608:2496] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 Leader for TabletID 72075186233409550 is [1:616:2500] sender: [1:635:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 110 2024-11-21T23:04:15.584635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:15.584686Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2024-11-21T23:04:15.771468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:172:2067] recipient: [2:45:2092] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:174:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:68:2107] sender: [2:176:2067] recipient: [2:175:2169] Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:178:2067] recipient: [2:175:2169] 2024-11-21T23:04:16.132702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:16.132981Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:177:2170] sender: [2:207:2067] recipient: [2:24:2071] 2024-11-21T23:04:16.253052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T23:04:16.387206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:243:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:245:2067] recipient: [2:239:2215] 
IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:245:2067] recipient: [2:239:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:249:2067] recipient: [2:234:2211] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:254:2067] recipient: [2:239:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2024-11-21T23:04:16.552590Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:246:2217] sender: [2:285:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:251:2219] sender: [2:286:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2024-11-21T23:04:16.637762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:335:2067] recipient: [2:331:2282] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:335:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:336:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:338:2286] sender: [2:339:2067] recipient: [2:331:2282] Leader for TabletID 72075186233409548 is [2:338:2286] sender: [2:340:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T23:04:16.863758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:413:2067] recipient: [2:408:2329] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:413:2067] recipient: [2:408:2329] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:415:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:416:2333] sender: [2:417:2067] recipient: [2:408:2329] 2024-11-21T23:04:17.268808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:17.268865Z node 2 :IMPORT WARN: Table profiles were not loaded 
TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:416:2333] sender: [2:443:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2024-11-21T23:04:17.431180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:04:17.431244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:04:17.432335Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:17.433126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:04:17.498556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2024-11-21T23:04:17.499346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T23:04:17.617319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:546:2067] recipient: [2:541:2435] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:546:2067] recipient: [2:541:2435] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:547:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:547:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:549:2439] sender: [2:550:2067] recipient: [2:541:2435] Leader for TabletID 72075186233409550 is [2:549:2439] sender: [2:551:2067] recipient: [2:24:2071] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2024-11-21T23:04:19.403286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:04:19.403572Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:19.463561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:04:19.463620Z node 2 :IMPORT WARN: Table profiles were not loaded |78.6%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2024-11-21T23:04:20.187563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872314873097676:2085];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:20.217930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0013de/r3tmp/tmpxMzn5L/pdisk_1.dat 2024-11-21T23:04:23.570620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:24.674579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:25.127029Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872314873097676:2085];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:25.127130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:04:25.159008Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:25.211205Z node 1 :HTTP ERROR: (#30,[::1]:26836) connection closed with error: Connection refused 2024-11-21T23:04:25.259558Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:04:25.314488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:25.315497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:25.415055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:25.554235Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:25.554257Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:04:25.554263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:25.557624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] |78.6%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNetClassifierTest::TestInitFromFile [GOOD] >> KqpScan::ScanRetryRead >> KqpScan::ScanDuringSplit10 >> KqpScan::RemoteShardScan |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2024-11-21T23:04:25.371622Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872338462847169:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:25.371665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00139e/r3tmp/tmpfadjjI/pdisk_1.dat 2024-11-21T23:04:26.930356Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:27.040022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:27.040123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:27.069217Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:27.078022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:27.260865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/00139e/r3tmp/yandexfrZz9t.tmp 2024-11-21T23:04:27.260889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/00139e/r3tmp/yandexfrZz9t.tmp 2024-11-21T23:04:27.261000Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2024-11-21T23:04:27.261024Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: /home/runner/.ya/build/build_root/dl5p/00139e/r3tmp/yandexfrZz9t.tmp 2024-11-21T23:04:27.261126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |78.6%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] |78.6%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TDataShardLocksTest::Points_OneTx |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |78.6%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2024-11-21T23:04:25.878874Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872337643168711:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:25.880658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0013bf/r3tmp/tmpg0oZND/pdisk_1.dat 2024-11-21T23:04:27.858659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:28.373954Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:28.381010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:28.381189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:28.397745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:28.559094Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0013bf/r3tmp/yandexoTnoiO.tmp 2024-11-21T23:04:28.559120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0013bf/r3tmp/yandexoTnoiO.tmp 2024-11-21T23:04:28.575851Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0013bf/r3tmp/yandexoTnoiO.tmp 2024-11-21T23:04:28.576014Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> Mirror3of4::ReplicationSmall [GOOD] >> Mirror3of4::ReplicationHuge |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] |78.6%| [LD] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |78.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |78.6%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> VDiskRestart::Simple [GOOD] |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |78.6%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |78.7%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting >> LocalPartitionReader::Simple >> LocalPartitionReader::Simple [GOOD] >> LocalPartitionReader::Booting [GOOD] |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |78.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> LocalPartitionReader::FeedSlowly |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/backup/impl/ut_local_partition_reader/unittest |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |78.7%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |78.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] >> LocalPartitionReader::FeedSlowly [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |78.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |78.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition |78.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] |78.8%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergeTest::MargePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] Test command err: 2024-11-21T23:03:36.674532Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2710} PDiskId# 1 ownerId# 9 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 9 ownerRound# 101 lsn# 18 PDiskId# 1 2024-11-21T23:03:38.414764Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2710} PDiskId# 1 ownerId# 5 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 5 ownerRound# 101 lsn# 12 PDiskId# 1 warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 warning: address range table at offset 0x990 has a premature terminator entry at offset 0x9a0 warning: address range table at offset 0x9c0 has a premature terminator entry at offset 0x9d0 warning: address range table at offset 0x9f0 has a premature terminator entry at offset 0xa00 warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30 warning: 
address range table at offset 0xa50 has a premature terminator entry at offset 0xa60 warning: address range table at offset 0xa80 has a premature terminator entry at offset 0xa90 warning: address range table at offset 0xab0 has a premature terminator entry at offset 0xac0 warning: address range table at offset 0xae0 has a premature terminator entry at offset 0xaf0 warning: address range table at offset 0xb10 has a premature terminator entry at offset 0xb20 warning: address range table at offset 0xb40 has a premature terminator entry at offset 0xb50 warning: address range table at offset 0xb70 has a premature terminator entry at offset 0xb80 warning: address range table at offset 0xba0 has a premature terminator entry at offset 0xbb0 warning: address range table at offset 0xbd0 has a premature terminator entry at offset 0xbe0 warning: address range table at offset 0xc00 has a premature terminator entry at offset 0xc10 warning: address range table at offset 0xc30 has a premature terminator entry at offset 0xc40 warning: address range table at offset 0xc60 has a premature terminator entry at offset 0xc70 warning: address range table at offset 0xc90 has a premature terminator entry at offset 0xca0 warning: address range table at offset 0xcc0 has a premature terminator entry at offset 0xcd0 warning: address range table at offset 0xcf0 has a premature terminator entry at offset 0xd00 warning: address range table at offset 0xd20 has a premature terminator entry at offset 0xd30 ================================================================= ==10053==ERROR: LeakSanitizer: detected memory leaks Direct leak of 25760 byte(s) in 70 object(s) allocated from: #0 0x264168d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x686a8be in NKikimr::NPDisk::TReqCreator::CreateLogWrite(NKikimr::NPDisk::TEvLog&, NActors::TActorId const&, double&, NWilson::TTraceId) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_req_creator.h:244:27 #2 0x685feb6 in NKikimr::NPDisk::TPDiskActor::Handle(TAutoPtr, TDelete>&) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp:810:48 #3 0x685de55 in NKikimr::NPDisk::TPDiskActor::StateOnline(TAutoPtr&) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp:1341:5 #4 0x3a9b856 in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:533:23 #5 0x3a95628 in NActors::TGenericExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:248:28 #6 0x3a9ea84 in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*)::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/library/actors/core/executor_thread.cpp:425:39 #7 0x3a9d98d in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*) /-S/ydb/library/actors/core/executor_thread.cpp:479:13 #8 0x3aa037a in NActors::TExecutorThread::ThreadProc() /-S/ydb/library/actors/core/executor_thread.cpp:510:9 #9 0x2957274 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #10 0x260b648 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 Indirect leak of 1437520 byte(s) in 70 object(s) allocated from: #0 0x260daff in malloc /-S/contrib/libs/clang18-rt/lib/asan/asan_malloc_linux.cpp:68:3 #1 0x596ad06 in y_allocate /-S/util/system/sys_alloc.h:9:15 #2 0x596ad06 in NDetail::TRcBufInternalBackend::Allocate(unsigned long, unsigned long, unsigned long) /-S/ydb/library/actors/util/rc_buf_backend.h:204:49 #3 0x59417f7 in 
Uninitialized /-S/ydb/library/actors/util/rc_buf_backend.h:144:29 #4 0x59417f7 in TRcBuf::Uninitialized(unsigned long, unsigned long, unsigned long) /-S/ydb/library/actors/util/rc_buf.h:829:36 #5 0x593d97b in TRcBuf::GrowFront(unsigned long, TRcBuf::EResizeStrategy) /-S/ydb/library/actors/util/rc_buf.h:1052:28 #6 0x6f413eb in NKikimr::TPutRecoveryLogRecOpt::SerializeZeroCopy(NKikimr::TBlobStorageGroupType const&, NKikimr::TLogoBlobID const&, TRcBuf&&) /-S/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp:46:14 #7 0x6f4125b in NKikimr::TPutRecoveryLogRecOpt::SerializeZeroCopy(NKikimr::TBlobStorageGroupType const&, NKikimr::TLogoBlobID const&, TRope&&) /-S/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp:38:16 #8 0x6a3c0a5 in std::__y1::pair>, NWilson::TTraceId> NKikimr::TSkeleton::CreatePutLogEvent(NActors::TActorContext const&, TBasicString>, NActors::TActorId, unsigned long, NLWTrace::TOrbit&&, NKikimr::TSkeleton::TVPutInfo&, std::__y1::unique_ptr>) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:458:34 #9 0x6a36f42 in NKikimr::TSkeleton::PrivateHandle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:795:42 #10 0x6a69ca9 in NKikimr::TSkeleton::StateNormal(TAutoPtr&) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:2710:9 #11 0x3a9b856 in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:533:23 #12 0x3a95628 in NActors::TGenericExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:248:28 #13 0x3a9ea84 in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*)::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/library/actors/core/executor_thread.cpp:425:39 #14 0x3a9d98d in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*) /-S/ydb/library/actors/core/executor_thread.cpp:479:13 #15 0x3aa037a in NActors::TExecutorThread::ThreadProc() /-S/ydb/library/actors/core/executor_thread.cpp:510:9 #16 0x2957274 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #17 0x260b648 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 Indirect leak of 6720 byte(s) in 70 object(s) allocated from: #0 0x264168d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x6f58d23 in make_unique > &, NActors::TActorId &, NActors::TActorId &, NActors::TActorId &, std::__y1::unique_ptr >, std::__y1::unique_ptr > > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:686:26 #2 0x6f58d23 in NKikimr::CreateHullUpdate(std::__y1::shared_ptr const&, NKikimr::TLogSignature, TRcBuf const&, NKikimr::TLsnSeg, void*, std::__y1::unique_ptr>, std::__y1::unique_ptr>) /-S/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp:96:25 #3 0x6a3c78f in std::__y1::pair>, NWilson::TTraceId> NKikimr::TSkeleton::CreatePutLogEvent(NActors::TActorContext const&, TBasicString>, NActors::TActorId, unsigned long, NLWTrace::TOrbit&&, NKikimr::TSkeleton::TVPutInfo&, std::__y1::unique_ptr>) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:472:27 #4 0x6a36f42 in NKikimr::TSkeleton::PrivateHandle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:795:42 #5 0x6a69ca9 in NKikimr::TSkeleton::StateNormal(TAutoPtr&) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:2710:9 #6 0x3a9b856 
in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:533:23 #7 0x3a95628 in NActors::TGenericExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:248:28 #8 0x3a9ea84 in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*)::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/library/actors/core/executor_thread.cpp:425:39 #9 0x3a9d98d in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*) /-S/ydb/library/actors/core/executor_thread.cpp:479:13 #10 0x3aa037a in NActors::TExecutorThread::ThreadProc() /-S/ydb/library/actors/core/executor_thread.cpp:510:9 #11 0x2957274 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #12 0x260b648 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 Indirect leak of 5600 byte(s) in 70 object(s) allocated from: #0 0x264168d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x6a3be71 in std::__y1::pair>, NWilson::TTraceId> NKikimr::TSkeleton::CreatePutLogEvent(NActors::TActorContext const&, TBasicString>, NActors::TActorId, unsigned long, NLWTrace::TOrbit&&, NKikimr::TSkeleton::TVPutInfo&, std::__y1::unique_ptr>) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:449:17 #2 0x6a36f42 in NKikimr::TSkeleton::PrivateHandle(TAutoPtr, TDelete>&, NActors::TActorContext const&) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:795:42 #3 0x6a69ca9 in NKikimr::TSkeleton::StateNormal(TAutoPtr&) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp:2710:9 #4 0x3a9b856 in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:533:23 #5 0x3a95628 in NActors::TGenericExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:248:28 ... 
nters> > &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:686:30 #9 0x68de605 in NKikimr::TSkeletonFront::Bootstrap(NActors::TActorContext const&) /-S/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp:727:33 #10 0x68de14a in NActors::TActorBootstrapped::StateBootstrap(TAutoPtr&) /-S/ydb/library/actors/core/actor_bootstrapped.h:22:22 #11 0x3a9b856 in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:533:23 #12 0x3a95628 in NActors::TGenericExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:248:28 #13 0x3a9ea84 in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*)::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/library/actors/core/executor_thread.cpp:425:39 #14 0x3a9d98d in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*) /-S/ydb/library/actors/core/executor_thread.cpp:479:13 #15 0x3aa037a in NActors::TExecutorThread::ThreadProc() /-S/ydb/library/actors/core/executor_thread.cpp:510:9 #16 0x2957274 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #17 0x260b648 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 Indirect leak of 24 byte(s) in 1 object(s) allocated from: #0 0x264168d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x5a04135 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:272:10 #2 0x5a04135 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:298:10 #3 0x5a04135 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:114:38 #4 0x5a04135 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:55:19 #5 0x5a04135 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:384:29 #6 0x5a04135 in std::__y1::vector>::__append(unsigned long) /-S/contrib/libs/cxxsupp/libcxx/include/vector:1194:53 #7 0x59f1344 in resize /-S/contrib/libs/cxxsupp/libcxx/include/vector:2049:15 #8 0x59f1344 in NKikimr::TBlobStorageGroupInfo::TBlobStorageGroupInfo(NKikimr::TBlobStorageGroupType, unsigned int, unsigned int, unsigned int, TVector> const*, NKikimr::TBlobStorageGroupInfo::EEncryptionMode, NKikimr::TBlobStorageGroupInfo::ELifeCyclePhase, NKikimr::TCypherKey, TIdWrapper) /-S/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo.cpp:580:25 #9 0x5e62dc1 in TConfiguration::Prepare(IVDiskSetup*, bool, bool) /-S/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp:317:21 #10 0x246dbaa in ChaoticWriteRestartWrite(TChaoticWriteRestartWriteSettings const&, TDuration) /-S/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp:42:10 #11 0x24d0a14 in NTestSuiteTBsLocalRecovery::TTestCaseChaoticWriteRestartHugeDecreased::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:586:9 #12 0x250f2d7 in operator() /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1 #13 0x250f2d7 in __invoke<(lambda at /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344:25 #14 0x250f2d7 in __call<(lambda at /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419:5 #15 0x250f2d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195:16 #16 0x250f2d7 in std::__y1::__function::__func, void ()>::operator()() 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366:12 #17 0x29d2f28 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519:16 #18 0x29d2f28 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170:12 #19 0x29d2f28 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #20 0x299a638 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #21 0x250e4a3 in NTestSuiteTBsLocalRecovery::TCurrentTest::Execute() /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1 #22 0x299bf05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #23 0x29ccadc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #24 0x7f2ce35bed8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 24 byte(s) in 1 object(s) allocated from: #0 0x264168d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x5a07636 in NKikimr::IBlobToDiskMapper::CreateBasicMapper(NKikimr::TBlobStorageGroupInfo::TTopology const*) /-S/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_blobmap.cpp:221:16 #2 0x59eea79 in CreateMapper /-S/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo.cpp:477:20 #3 0x59eea79 in NKikimr::TBlobStorageGroupInfo::TTopology::FinalizeConstruction() /-S/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo.cpp:355:22 #4 0x59f1a44 in NKikimr::TBlobStorageGroupInfo::TBlobStorageGroupInfo(NKikimr::TBlobStorageGroupType, unsigned int, unsigned int, unsigned int, TVector> const*, NKikimr::TBlobStorageGroupInfo::EEncryptionMode, NKikimr::TBlobStorageGroupInfo::ELifeCyclePhase, NKikimr::TCypherKey, TIdWrapper) /-S/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo.cpp:588:15 #5 0x5e62dc1 in TConfiguration::Prepare(IVDiskSetup*, bool, bool) /-S/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp:317:21 #6 0x246dbaa in ChaoticWriteRestartWrite(TChaoticWriteRestartWriteSettings const&, TDuration) /-S/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp:42:10 #7 0x24d0a14 in NTestSuiteTBsLocalRecovery::TTestCaseChaoticWriteRestartHugeDecreased::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:586:9 #8 0x250f2d7 in operator() /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1 #9 0x250f2d7 in __invoke<(lambda at /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344:25 #10 0x250f2d7 in __call<(lambda at /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419:5 #11 0x250f2d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195:16 #12 0x250f2d7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366:12 #13 0x29d2f28 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519:16 #14 0x29d2f28 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170:12 #15 0x29d2f28 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #16 0x299a638 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) 
/-S/library/cpp/testing/unittest/registar.cpp:374:18 #17 0x250e4a3 in NTestSuiteTBsLocalRecovery::TCurrentTest::Execute() /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1 #18 0x299bf05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #19 0x29ccadc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #20 0x7f2ce35bed8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 16 byte(s) in 1 object(s) allocated from: #0 0x264168d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x59eeb04 in CreateQuorumChecker /-S/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo.cpp:508:20 #2 0x59eeb04 in NKikimr::TBlobStorageGroupInfo::TTopology::FinalizeConstruction() /-S/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo.cpp:357:25 #3 0x59f1a44 in NKikimr::TBlobStorageGroupInfo::TBlobStorageGroupInfo(NKikimr::TBlobStorageGroupType, unsigned int, unsigned int, unsigned int, TVector> const*, NKikimr::TBlobStorageGroupInfo::EEncryptionMode, NKikimr::TBlobStorageGroupInfo::ELifeCyclePhase, NKikimr::TCypherKey, TIdWrapper) /-S/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo.cpp:588:15 #4 0x5e62dc1 in TConfiguration::Prepare(IVDiskSetup*, bool, bool) /-S/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp:317:21 #5 0x246dbaa in ChaoticWriteRestartWrite(TChaoticWriteRestartWriteSettings const&, TDuration) /-S/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp:42:10 #6 0x24d0a14 in NTestSuiteTBsLocalRecovery::TTestCaseChaoticWriteRestartHugeDecreased::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:586:9 #7 0x250f2d7 in operator() /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1 #8 0x250f2d7 in __invoke<(lambda at /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344:25 #9 0x250f2d7 in __call<(lambda at /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419:5 #10 0x250f2d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195:16 #11 0x250f2d7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366:12 #12 0x29d2f28 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519:16 #13 0x29d2f28 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170:12 #14 0x29d2f28 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #15 0x299a638 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #16 0x250e4a3 in NTestSuiteTBsLocalRecovery::TCurrentTest::Execute() /-S/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp:439:1 #17 0x299bf05 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #18 0x29ccadc in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #19 0x7f2ce35bed8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 1552688 byte(s) leaked in 2082 allocation(s). 
>> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> DataStreams::TestStreamStorageRetention |78.8%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] >> DataStreams::TestPutRecordsOfAnauthorizedUser >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] >> TPopulatorTest::Boot >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TPopulatorTest::RemoveDir >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TPopulatorTest::Boot [GOOD] >> TraverseDatashard::TraverseTwoTables >> ClosedIntervalSet::Difference [GOOD] >> ClosedIntervalSet::Contains >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] >> DataStreams::TestUpdateStorage >> ClosedIntervalSet::Contains [GOOD] >> ClosedIntervalSet::EnumInRange >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:39.823047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:39.823130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:39.823179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:39.823225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:39.823273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:39.823298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:39.823356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:39.823672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:39.899893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:39.899948Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:39.909940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:39.913724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:39.913899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:39.920252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T23:04:39.920797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:39.921382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:39.921700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:39.926765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:39.927947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:39.928003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:39.928233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:39.928284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:39.928321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:39.928403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:39.934633Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:40.079929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.080113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.080267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:40.080416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:40.080461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.087607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.087744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:40.088010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.088064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:40.088150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:40.088192Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:40.090129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.090191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:40.090240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:40.091943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.091983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.092017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.092078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.104033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:40.105962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:40.106157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:40.107110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.107226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.107269Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.107536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:40.107603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.107756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:40.107841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:40.110322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:40.110371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:40.110543Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.110577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:40.110845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.110884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:40.110964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:40.111011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.111056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:40.111098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.111148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:40.111177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:40.111234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:40.111273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:40.111301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:40.112900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:40.112990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:40.113018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:40.113066Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:40.113100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:40.113193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.548991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T23:04:40.549299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T23:04:40.549338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T23:04:40.549778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:40.549887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:04:40.549936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:641:2564] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2024-11-21T23:04:40.552841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.553054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.553245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2024-11-21T23:04:40.555817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.555991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:04:40.556326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T23:04:40.556369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T23:04:40.556794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T23:04:40.556889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T23:04:40.556924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 106: satisfy waiter [1:648:2571] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2024-11-21T23:04:40.559730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.559962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.560184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2024-11-21T23:04:40.562314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.562504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2024-11-21T23:04:40.562820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T23:04:40.562870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T23:04:40.563307Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2024-11-21T23:04:40.563407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T23:04:40.563448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:655:2578] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2024-11-21T23:04:40.566185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.566408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.566613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2024-11-21T23:04:40.571286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.571457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T23:04:40.571849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T23:04:40.571891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T23:04:40.572414Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T23:04:40.572502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T23:04:40.572537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:662:2585] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2024-11-21T23:04:40.575398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.575585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.575808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2024-11-21T23:04:40.578243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.578423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2024-11-21T23:04:40.578795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2024-11-21T23:04:40.578842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2024-11-21T23:04:40.579314Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2024-11-21T23:04:40.579406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2024-11-21T23:04:40.579443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:669:2592] TestWaitNotification: OK eventTxId 109 >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> DataStreams::TestDeleteStream >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> DataStreams::TestControlPlaneAndMeteringData |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] >> TPopulatorTest::RemoveDir [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2024-11-21T23:04:41.449715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:41.449768Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:40.124252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:40.124342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.124388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:40.124440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:40.124477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:40.124508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:40.124572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.124855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:40.204306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:40.204358Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:40.221723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:40.226190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:40.226358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:40.240992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:40.241596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:40.242187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.242485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:40.246288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.247456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:40.247515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.247742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:40.247804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:40.247842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:40.247922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.254420Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:40.383255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.383447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.383629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2024-11-21T23:04:40.383857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:40.383915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.387480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.387636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:40.387905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.387964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:40.388008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:40.388038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:40.389896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.389964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:40.389997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:40.391702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.391746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.391802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.391853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.395400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:40.397232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:40.397408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:40.398370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.398696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.398737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.399007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:40.399071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.399202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:40.399278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:40.401008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:40.401052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:40.401181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.401212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:40.401573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.401623Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:40.401689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:40.401710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.401743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:40.401779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.401828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:40.401853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:40.401895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:40.401919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:40.401941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:40.403626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:40.403718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:40.403756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
2024-11-21T23:04:40.403823Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:40.403860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:40.403948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... : 72075186233409548 TxId: 104 Status: OK 2024-11-21T23:04:40.785532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2024-11-21T23:04:40.785575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T23:04:40.785613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T23:04:40.791364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T23:04:40.791600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T23:04:40.791637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T23:04:40.792010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:04:40.792065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T23:04:40.792109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:04:40.828349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.828497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 382 RawX2: 4294969647 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.828563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2024-11-21T23:04:40.828645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T23:04:40.905328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2024-11-21T23:04:40.905505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T23:04:40.905581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T23:04:40.905626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept 
TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.905688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T23:04:40.905872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T23:04:40.906026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:04:40.906087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:40.908621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.909229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:40.909284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:04:40.909512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:04:40.909689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.909729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T23:04:40.909773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T23:04:40.910126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.910170Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T23:04:40.910257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T23:04:40.910288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:04:40.910330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T23:04:40.910373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:04:40.910443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T23:04:40.910489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T23:04:40.910629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:04:40.910682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2024-11-21T23:04:40.910724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T23:04:40.910754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 
2024-11-21T23:04:40.911891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:04:40.911988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:04:40.912027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T23:04:40.912061Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T23:04:40.912095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:04:40.912788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:04:40.912858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:04:40.912889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T23:04:40.912927Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T23:04:40.912956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:40.913007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2024-11-21T23:04:40.913054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:410:2377] 2024-11-21T23:04:40.916835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T23:04:40.918028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T23:04:40.918104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:04:40.918156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:543:2479] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2024-11-21T23:04:40.930766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.931009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: 
/MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.931167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2024-11-21T23:04:40.933245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.933387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T23:04:40.933627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T23:04:40.933665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T23:04:40.934046Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:40.934123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:04:40.934167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:643:2568] TestWaitNotification: OK eventTxId 105 >> TPopulatorTestWithResets::UpdateAck >> TYardTest::TestLogWriteCutEqual [GOOD] >> TYardTest::TestLogWriteCutEqualRandomWait >> DataStreams::TestGetShardIterator |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2024-11-21T23:04:41.656685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:41.656736Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2024-11-21T23:04:41.733187Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } 
} PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2024-11-21T23:04:41.733292Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2024-11-21T23:04:41.734760Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.734849Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.734881Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.735492Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2024-11-21T23:04:41.735538Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2024-11-21T23:04:41.735650Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2024-11-21T23:04:41.735716Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2024-11-21T23:04:41.735750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2024-11-21T23:04:41.735912Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T23:04:41.735967Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 
72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.736026Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.736061Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.736182Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T23:04:41.736207Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2024-11-21T23:04:41.736270Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2024-11-21T23:04:41.736306Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2024-11-21T23:04:41.736365Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2024-11-21T23:04:41.736443Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T23:04:41.736733Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:93:2120], cookie# 100 2024-11-21T23:04:41.737134Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:94:2121], cookie# 100 2024-11-21T23:04:41.737168Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2024-11-21T23:04:41.737371Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:95:2122], cookie# 100 2024-11-21T23:04:41.737397Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T23:04:41.739527Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 321, preserialized size# 53 2024-11-21T23:04:41.739566Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2024-11-21T23:04:41.739680Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.739739Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.739791Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:41.740272Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2024-11-21T23:04:41.740319Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2024-11-21T23:04:41.740415Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [Owner ... 
45684Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2024-11-21T23:04:41.745746Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2024-11-21T23:04:41.745784Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2024-11-21T23:04:41.745857Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:93:2120], cookie# 101 2024-11-21T23:04:41.745891Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.745923Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.745958Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.746081Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:94:2121], cookie# 101 2024-11-21T23:04:41.746121Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2024-11-21T23:04:41.746191Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2024-11-21T23:04:41.746223Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2024-11-21T23:04:41.746263Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2024-11-21T23:04:41.746484Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:95:2122], cookie# 101 2024-11-21T23:04:41.746536Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 101 2024-11-21T23:04:41.746739Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# 
[1:94:2121], cookie# 101 2024-11-21T23:04:41.746777Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2024-11-21T23:04:41.747009Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 101 2024-11-21T23:04:41.747038Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T23:04:41.748380Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 101, event size# 219, preserialized size# 2 2024-11-21T23:04:41.748429Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2024-11-21T23:04:41.748531Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.748571Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.748599Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.748824Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 101, event size# 303, preserialized size# 0 2024-11-21T23:04:41.748856Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2024-11-21T23:04:41.748928Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2024-11-21T23:04:41.748967Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2024-11-21T23:04:41.748998Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:04:41.749094Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:93:2120], cookie# 101 2024-11-21T23:04:41.749128Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.749168Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.749200Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 101 2024-11-21T23:04:41.749383Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:94:2121], cookie# 101 2024-11-21T23:04:41.749422Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2024-11-21T23:04:41.749482Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2024-11-21T23:04:41.749523Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2024-11-21T23:04:41.749552Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2024-11-21T23:04:41.749722Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 
PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:95:2122], cookie# 101 2024-11-21T23:04:41.749877Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:93:2120], cookie# 101 2024-11-21T23:04:41.749957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:94:2121], cookie# 101 2024-11-21T23:04:41.749984Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2024-11-21T23:04:41.750353Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:95:2122], cookie# 101 2024-11-21T23:04:41.750408Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:40.727750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:40.727842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.727878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:40.727923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:40.727973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:40.728000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:40.728048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.728349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:40.802460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:40.802518Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:40.814231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:40.818091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T23:04:40.818244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:40.823713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:40.824234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:40.824816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.825075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:40.828773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.829905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:40.829956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.830168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:40.830212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:40.830260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:40.830342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.836276Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:40.957599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.957793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.957986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:40.958171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:40.958223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.962302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.962452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:40.962618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.962678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:40.962719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:40.962753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:40.964448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.964495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:40.964525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:40.965950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.965990Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.966033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.966082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.978219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:40.980141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:40.980317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:40.981205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.981319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:40.981358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.981597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:40.981638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:40.981789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:40.981864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:40.983757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T23:04:40.983811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:40.983947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.983983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:40.984323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.984364Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:40.984445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:40.984482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.984526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:40.984566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:40.984617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:40.984646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:40.984700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:40.984735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:40.984764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:40.991261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:40.991379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:40.991412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:40.991459Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:40.991516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:40.991624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
tatus StatusSuccess 2024-11-21T23:04:41.611877Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 
ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:41.613048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:773:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:776:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:777:2058] recipient: [1:775:2685] Leader for TabletID 72057594046678944 is [1:778:2686] sender: [1:779:2058] recipient: [1:775:2685] 2024-11-21T23:04:41.652062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:41.652148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:41.652189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:41.652220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:41.652249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:41.652273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:41.652317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:41.652580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:41.667113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:41.668323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:41.668516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:41.668668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:41.668702Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:41.668907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:41.669499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T23:04:41.669580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:04:41.669632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:04:41.669697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read 
records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.669809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.670046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:04:41.670293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.670553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.670648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.670751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2024-11-21T23:04:41.670788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:04:41.670815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:04:41.670836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T23:04:41.670857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:04:41.670950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.671011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.671229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2024-11-21T23:04:41.671386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:04:41.671720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.671855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.672240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.672320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.672517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.672602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.672686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.672852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.672943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.673121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 
2024-11-21T23:04:41.673341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.673489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.673533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.673585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.680983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:41.681055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.681616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:41.681670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:41.681709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:41.683666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:40.971025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:40.971082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.971115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:40.971139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:40.971167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:40.971183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:40.971217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.971424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:41.035919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:41.035967Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:41.051354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:41.055437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:41.055587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:41.063125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:41.064050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:41.064630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.064871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:41.070802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.071978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:41.072038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.072252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:41.072303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:41.072357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:41.072447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.087142Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:41.203529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:41.203714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.203896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:41.204097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:41.204155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.207117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.207233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:41.207371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.207428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:41.207460Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:41.207488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:41.211188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.211252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:41.211290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:41.215550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.215598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.215639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.215692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.224564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:41.227163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:41.227321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:41.228217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.228328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:41.228373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.228708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:41.228762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.228898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:41.228975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:41.230919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:41.230975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:41.231123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.231158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:41.231486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.231527Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:41.231607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:41.231642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.231678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:41.231727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.231783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:41.231821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:41.231883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:41.231927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:41.231970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:41.233596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:41.233699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:41.233731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:41.233783Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:41.233818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:41.233897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
es { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:42.229665Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:636:2058] recipient: [2:100:2135] Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:639:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:123:2149] sender: [2:640:2058] recipient: [2:638:2562] Leader for TabletID 72057594046678944 is [2:641:2563] sender: [2:642:2058] recipient: [2:638:2562] 2024-11-21T23:04:42.264239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:42.264315Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:42.264364Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:42.264399Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:42.264432Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:42.264464Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:42.264512Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:42.266879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:42.285842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:42.287254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:42.287441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:42.287568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:42.287589Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:42.287660Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:42.288254Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T23:04:42.288338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:04:42.288372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:04:42.288436Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.288494Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.289582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:04:42.289749Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.289936Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.290297Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.290365Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2024-11-21T23:04:42.290404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:04:42.290423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:04:42.290437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T23:04:42.290451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:04:42.290524Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.290575Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.290733Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2024-11-21T23:04:42.290849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:04:42.291098Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.291162Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.291399Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.291442Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.291573Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.292282Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.292347Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.292469Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.292537Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.292706Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.292834Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
2024-11-21T23:04:42.292923Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.292956Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.292994Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:42.301185Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:42.301272Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:42.301508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:42.301560Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:42.301602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:42.303802Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:641:2563] sender: [2:700:2058] recipient: [2:15:2062] 2024-11-21T23:04:42.363826Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:04:42.364056Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 271us result status StatusSuccess 2024-11-21T23:04:42.364605Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 
72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] >> TYardTest::TestStartingPointReboots [GOOD] >> TYardTest::TestRestartAtNonceJump >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> DataStreams::TestGetRecordsStreamWithSingleShard >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BrokenJsonResponse-] |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy >> TPersQueueTest::ReadFromSeveralPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:41.526749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:41.526853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:41.526922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:41.526982Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:41.527030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:41.527084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:41.527154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:41.527492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:41.609889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:41.609952Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:41.631627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:41.635782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:41.635951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:41.647017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:41.647718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:41.648389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.648675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:41.656201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.657508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:41.657575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.657827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:41.657884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:41.657927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:41.658024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.670664Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:41.780499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:41.780679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.780865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:41.781039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:41.781097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.784356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.784505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:41.784693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.784763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:41.784805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:41.784838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:41.787031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.787112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:41.787157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:41.792328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.792393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.792435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.792514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.796447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:41.798667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:41.798889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:41.799869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.799978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T23:04:41.800015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.800262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:41.800303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.800442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:41.800512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:41.802824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:41.802876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:41.803055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.803111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:41.803468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.803517Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:41.803630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:41.803679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.803727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:41.803804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.803845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:41.803884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:41.803958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:41.804000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:41.804029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:41.805875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:41.805981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:41.806016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:41.806069Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:41.806108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:41.806210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... erationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T23:04:43.642629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T23:04:43.642671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T23:04:43.643070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:43.643118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T23:04:43.643184Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:43.685487Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:43.685655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 378 RawX2: 8589936941 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:43.685727Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2024-11-21T23:04:43.685788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T23:04:43.776185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T23:04:43.776376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T23:04:43.776466Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T23:04:43.776529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:43.776569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T23:04:43.776733Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T23:04:43.776925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:43.785321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 
2024-11-21T23:04:43.785653Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:43.785706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:04:43.786027Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:43.786076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T23:04:43.786308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:43.786360Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T23:04:43.786507Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T23:04:43.786559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:04:43.786611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T23:04:43.786657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:04:43.786700Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T23:04:43.786741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T23:04:43.786889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:04:43.786935Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T23:04:43.786973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T23:04:43.788178Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:04:43.788281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:04:43.788326Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T23:04:43.788373Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:04:43.788418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:43.788502Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T23:04:43.788543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:409:2378] 2024-11-21T23:04:43.802154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 
2024-11-21T23:04:43.802273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:04:43.802317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:691:2615] TestWaitNotification: OK eventTxId 105 2024-11-21T23:04:43.812155Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:04:43.812433Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 313us result status StatusSuccess 2024-11-21T23:04:43.813166Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: 
"\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ClosedIntervalSet::EnumInRange [GOOD] >> ClosedIntervalSet::EnumInRangeReverse >> TPopulatorTestWithResets::UpdateAck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:40.924369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:40.924446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.924496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:40.924533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:40.924573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:40.924600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:40.924656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.924964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T23:04:40.995325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:40.995367Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:41.008633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:41.012653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:41.012909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:41.017750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:41.018081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:41.018490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.018656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:41.021295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.022450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:41.022501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.022714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:41.022760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:41.022794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:41.022871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.028866Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:41.153333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:41.153524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.153715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:41.153909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:41.153964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.156048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.156175Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:41.156343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.156401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:41.156453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:41.156484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:41.158147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.158200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:41.158231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:41.159725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.159781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.159815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.159867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.163466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:41.165159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:41.165351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:41.166203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.166339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:41.166383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.166687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:41.166734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.166880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 1 2024-11-21T23:04:41.166962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:41.169745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:41.169799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:41.169944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.169978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:41.170291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.170338Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:41.170435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:41.170465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.170504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:41.170556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.170601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:41.170645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:41.170717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:41.170761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:41.170795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:41.172540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:41.172637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:41.172670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:41.172715Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:41.172751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:41.172850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T23:04:43.420696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:43.420733Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T23:04:43.420837Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T23:04:43.420864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:04:43.420899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T23:04:43.420928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:04:43.420955Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T23:04:43.420976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T23:04:43.421057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:04:43.421083Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T23:04:43.421107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T23:04:43.421519Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:04:43.421579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:04:43.421607Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T23:04:43.421635Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:04:43.421663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:43.421716Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T23:04:43.421756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T23:04:43.425762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T23:04:43.425874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:04:43.425909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:712:2633] TestWaitNotification: OK eventTxId 105 2024-11-21T23:04:43.994357Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:43.994617Z node 2 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 284us result status StatusSuccess 2024-11-21T23:04:43.995221Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:44.069661Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:04:44.069957Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 323us result status StatusSuccess 2024-11-21T23:04:44.070556Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { 
Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2024-11-21T23:04:44.073547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:44.073718Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T23:04:44.073853Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 
2024-11-21T23:04:44.084217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:44.084504Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:04:44.085003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T23:04:44.085070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T23:04:44.085653Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T23:04:44.085775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T23:04:44.085828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:782:2696] TestWaitNotification: OK eventTxId 106 >> TPersQueueTest::DirectReadPreCached >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2024-11-21T23:04:44.642021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:44.642365Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2024-11-21T23:04:45.082737Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 311, preserialized size# 48 2024-11-21T23:04:45.083272Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 
3 2024-11-21T23:04:45.093471Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:45.094077Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:45.094108Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 100 2024-11-21T23:04:45.096421Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 217, preserialized size# 2 2024-11-21T23:04:45.096458Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T23:04:45.135264Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, 
event size# 321, preserialized size# 53 2024-11-21T23:04:45.135324Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2024-11-21T23:04:45.136385Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:68:2107], cookie# 100, event size# 222, preserialized size# 2 2024-11-21T23:04:45.137442Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T23:04:45.180341Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2024-11-21T23:04:45.180512Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2121] Successful handshake: replica# [1:15:2062] 2024-11-21T23:04:45.180669Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:94:2121] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:04:45.180970Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2024-11-21T23:04:45.180988Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2122] Successful handshake: replica# [1:18:2065] 2024-11-21T23:04:45.181142Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2122] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:04:45.181320Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2024-11-21T23:04:45.181463Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:93:2120] Successful handshake: replica# [1:12:2059] 2024-11-21T23:04:45.181483Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:93:2120] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:04:45.181689Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:94:2121] 2024-11-21T23:04:45.182368Z 
node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:94:2121] 2024-11-21T23:04:45.182447Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2024-11-21T23:04:45.182563Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T23:04:45.182666Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:94:2121] 2024-11-21T23:04:45.182743Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:95:2122] 2024-11-21T23:04:45.182780Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T23:04:45.182828Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 2024-11-21T23:04:45.182897Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:95:2122] 2024-11-21T23:04:45.182948Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2024-11-21T23:04:45.182991Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T23:04:45.183045Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:95:2122] 2024-11-21T23:04:45.183172Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2024-11-21T23:04:45.183200Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T23:04:45.183262Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:93:2120] 2024-11-21T23:04:45.183300Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:92:2119] 
2024-11-21T23:04:45.183338Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2024-11-21T23:04:45.183384Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:93:2120] 2024-11-21T23:04:45.183410Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T23:04:45.183431Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2024-11-21T23:04:45.183476Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:93:2120] 2024-11-21T23:04:45.183515Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2024-11-21T23:04:45.183560Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:94:2121] 2024-11-21T23:04:45.183600Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:92:2119], cookie# 0 2024-11-21T23:04:45.183863Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T23:04:45.183934Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:95:2122] 2024-11-21T23:04:45.183972Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2024-11-21T23:04:45.184006Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:94:2121] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2024-11-21T23:04:45.184047Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T23:04:45.184173Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 0 2024-11-21T23:04:45.184201Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:94:2121], cookie# 0 2024-11-21T23:04:45.184230Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# 
[1:18:2065] 2024-11-21T23:04:45.184259Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:94:2121], cookie# 100 2024-11-21T23:04:45.184285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 0 2024-11-21T23:04:45.184300Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:94:2121], cookie# 0 2024-11-21T23:04:45.184341Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:94:2121], cookie# 100 2024-11-21T23:04:45.184376Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:93:2120] 2024-11-21T23:04:45.184408Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:92:2119] 2024-11-21T23:04:45.184450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 0 2024-11-21T23:04:45.184486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 0 2024-11-21T23:04:45.184525Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:93:2120] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2024-11-21T23:04:45.184562Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:95:2122], cookie# 100 2024-11-21T23:04:45.184588Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2024-11-21T23:04:45.184622Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2024-11-21T23:04:45.184909Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 0 2024-11-21T23:04:45.184927Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:95:2122], cookie# 0 2024-11-21T23:04:45.184950Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:95:2122], cookie# 100 2024-11-21T23:04:45.184965Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2024-11-21T23:04:45.185078Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: 
[1:92:2119] Ack update: ack to# [1:68:2107], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2024-11-21T23:04:45.185446Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 0 2024-11-21T23:04:45.185466Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 0 2024-11-21T23:04:45.185489Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:93:2120], cookie# 100 2024-11-21T23:04:45.185511Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 100 2024-11-21T23:04:45.185751Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 0 2024-11-21T23:04:45.185780Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 0 2024-11-21T23:04:45.185914Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:93:2120], cookie# 100 2024-11-21T23:04:45.185931Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:92:2119] Ack for unknown update (already acked?): sender# [1:93:2120], cookie# 100 TestWaitNotification: OK eventTxId 100 |78.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:40.777951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:40.778022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.778057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:40.778086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:40.778111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:40.778137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:40.778179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:40.778418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:40.847407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:40.847457Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:40.858258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:40.862475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:40.862636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:40.868954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:40.869548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:40.870108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.870386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:40.874580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.875726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:40.875805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:40.876053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:40.876102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T23:04:40.876142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:40.876225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.882547Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:40.992645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:40.992839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.993029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:40.993260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:40.993328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.999232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:40.999359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:40.999526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:40.999595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:40.999635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:40.999664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:41.002945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.003007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:41.003054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:41.004752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.004794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.004830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.004893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.008329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:41.010094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:41.010254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:41.011169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:41.011284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:41.011354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.011601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:41.011641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:41.011812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:41.011903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:41.013647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:41.013689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:41.013824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:41.013860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:41.014223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:41.014274Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:41.014363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:41.014422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.014463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:41.014507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:41.014539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T23:04:41.014565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:41.014620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:41.014650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:41.014679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:41.016394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:41.016483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:41.016513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:41.016560Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:41.016599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:41.016680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 548 TxId: 104 Status: OK 2024-11-21T23:04:45.184494Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2024-11-21T23:04:45.184539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T23:04:45.184582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T23:04:45.186348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T23:04:45.186629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T23:04:45.186675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T23:04:45.187055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:04:45.187095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T23:04:45.187134Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:04:45.222595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:45.222731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 373 RawX2: 8589936935 } } Step: 150 MediatorID: 72075186233409547 
TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:45.222797Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2024-11-21T23:04:45.222886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T23:04:45.333073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2024-11-21T23:04:45.334033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T23:04:45.334929Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2024-11-21T23:04:45.335223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:45.335543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T23:04:45.341464Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T23:04:45.342173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:04:45.342549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:45.364533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:04:45.365292Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:45.365501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:04:45.366341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:04:45.367216Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:45.367404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 104, path id: 2 2024-11-21T23:04:45.367579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T23:04:45.368860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:04:45.369222Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T23:04:45.369786Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T23:04:45.369943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:04:45.370085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T23:04:45.370289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:04:45.377747Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T23:04:45.378267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T23:04:45.378845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:04:45.379106Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2024-11-21T23:04:45.379561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T23:04:45.379591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T23:04:45.390291Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:04:45.390861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:04:45.391131Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2024-11-21T23:04:45.391717Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T23:04:45.391991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:04:45.393422Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:04:45.393477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:04:45.393496Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T23:04:45.393517Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T23:04:45.393537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:45.393586Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2024-11-21T23:04:45.393831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:404:2372] 2024-11-21T23:04:45.407771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 104 2024-11-21T23:04:45.407900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T23:04:45.407965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:04:45.408000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:540:2477] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2024-11-21T23:04:45.447281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:45.448732Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:45.450192Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2024-11-21T23:04:45.467727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:45.469084Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T23:04:45.470286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T23:04:45.478705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T23:04:45.481061Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:45.481155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:04:45.481192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:638:2564] TestWaitNotification: OK eventTxId 105 >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> TYardTest::TestRestartAtNonceJump [GOOD] >> TYardTest::TestRestartAtChunkEnd >> ClosedIntervalSet::EnumInRangeReverse [GOOD] >> GivenIdRange::IssueNewRange [GOOD] >> GivenIdRange::Trim >> TCmsTest::TestForceRestartModeDisconnects >> GivenIdRange::Trim [GOOD] >> GivenIdRange::Subtract |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut_transform/py3test >> 
test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] >> TCmsTest::StateRequestUnknownNode |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |78.9%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost |78.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.9%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test |78.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> TDataShardLocksTest::UseLocksCache [GOOD] |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] >> TCmsTest::RequestReplaceBrokenDevices >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> TCmsTest::TestForceRestartMode >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] >> GivenIdRange::Subtract [GOOD] >> GivenIdRange::Points >> TCmsTest::StateStorageNodesFromOneRing >> TCmsTest::ActionIssuePartialPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2024-11-21T23:04:39.633313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:04:39.636575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:39.636730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00280f/r3tmp/tmpZMQhad/pdisk_1.dat 2024-11-21T23:04:40.069693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:04:40.112199Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:40.166373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:40.166566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:40.178042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:40.311456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:40.359421Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:04:40.360630Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:04:40.361047Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2024-11-21T23:04:40.361258Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:40.408722Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:04:40.408825Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:04:40.410122Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:04:40.410488Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:640:2542] 2024-11-21T23:04:40.410678Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:40.419101Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:624:2532], Recipient [1:640:2542]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:04:40.419571Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:40.419724Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:04:40.421267Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:04:40.421346Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:04:40.421402Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 
2024-11-21T23:04:40.421708Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:04:40.479521Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:04:40.479728Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:04:40.479889Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:671:2559] 2024-11-21T23:04:40.479934Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:04:40.479983Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:04:40.480018Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:04:40.480358Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:04:40.480406Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:04:40.480798Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:04:40.480886Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:04:40.481087Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:04:40.481127Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:04:40.481165Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:04:40.481204Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:04:40.481238Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:04:40.481271Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:04:40.481315Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:04:40.481741Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2548], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:04:40.481782Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:04:40.481826Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:631:2536], serverId# [1:653:2548], sessionId# [0:0:0] 2024-11-21T23:04:40.481991Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2548] 2024-11-21T23:04:40.482030Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:04:40.482134Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:04:40.482344Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:04:40.482432Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:04:40.482544Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:04:40.482615Z node 1 
:TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:04:40.482667Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:04:40.482742Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:04:40.482773Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:04:40.483114Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:04:40.483158Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:04:40.483191Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:04:40.483221Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:04:40.483272Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:04:40.483312Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:04:40.483347Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:04:40.483380Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:04:40.483405Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:04:40.483565Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:40.483671Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:04:40.484920Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T23:04:40.485003Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T23:04:40.485048Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T23:04:40.485277Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:04:40.485313Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T23:04:40.485375Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:04:40.485434Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:672:2560] 2024-11-21T23:04:40.485460Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T23:04:40.485484Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T23:04:40.485507Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:04:40.486032Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:640:2542], Recipient [1:640:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:04:40.486071Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:04:40.486570Z node 1 
:TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T23:04:40.486639Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T23:04:40.486832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:661:2555], Recipient [1:640:2542]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:04:40.486865Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:04:40.486912Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:632:2537], serverId# [1:661:2555], sessionId# [0:0:0] 2024-11-21T23:04:40.487033Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:04:40.487061Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:04:40.487086Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2024-11-21T23:04:40.487110Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T23:04:40.487133Z node 1 :TX_DATASHARD TRACE: Uni ... 8 is DelayComplete 2024-11-21T23:04:51.010827Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:04:51.010849Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:04:51.010870Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:04:51.010910Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2024-11-21T23:04:51.011551Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:04:51.011581Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2024-11-21T23:04:51.025973Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:04:51.026023Z node 2 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715662] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:04:51.026078Z node 2 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [2:898:2691], exec latency: 9 ms, propose latency: 9 ms 2024-11-21T23:04:51.026140Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T23:04:51.026175Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:04:51.026227Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:04:51.026258Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2024-11-21T23:04:51.026573Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T23:04:51.027094Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2024-11-21T23:04:51.027649Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:938:2748], Recipient [2:639:2541]: {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T23:04:51.027923Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:04:51.027963Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2024-11-21T23:04:51.033491Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:639:2541]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T23:04:51.355885Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fh7kfe1expkb0d28w8yga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDZkZjc1NTYtNThmZjA4Y2UtODY4YWY0MzktYWU4OTc2NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:04:51.378083Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:979:2774], Recipient [2:938:2748]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T23:04:51.387288Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:04:51.387896Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2024-11-21T23:04:51.388556Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T23:04:51.389085Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:04:51.389736Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:04:51.390700Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:04:51.390765Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2024-11-21T23:04:51.391083Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T23:04:51.391112Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:04:51.391549Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:04:51.391584Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:04:51.392637Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T23:04:51.395820Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2024-11-21T23:04:51.396130Z node 2 :TX_DATASHARD TRACE: 
72075186224037888 Complete read# {[2:979:2774], 0} after executionsCount# 1 2024-11-21T23:04:51.396433Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:979:2774], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:04:51.396785Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:979:2774], 0} finished in read 2024-11-21T23:04:51.396859Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T23:04:51.396884Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:04:51.396907Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:04:51.396929Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:04:51.396971Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T23:04:51.396997Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:04:51.397027Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2024-11-21T23:04:51.397526Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:04:51.403139Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T23:04:51.413637Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:979:2774], Recipient [2:938:2748]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T23:04:51.413959Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T23:04:51.415144Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:979:2774], Recipient [2:639:2541]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2024-11-21T23:04:51.415235Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T23:04:51.415291Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2024-11-21T23:04:51.415349Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T23:04:51.415375Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2024-11-21T23:04:51.415400Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T23:04:51.415424Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T23:04:51.415469Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2024-11-21T23:04:51.415518Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T23:04:51.415539Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T23:04:51.415559Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 
2024-11-21T23:04:51.415576Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2024-11-21T23:04:51.415647Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2024-11-21T23:04:51.417467Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2024-11-21T23:04:51.417505Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:979:2774], 1} after executionsCount# 1 2024-11-21T23:04:51.417538Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:979:2774], 1} sends rowCount# 2, bytes# 48, quota rows left# 997, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:04:51.417595Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:979:2774], 1} finished in read 2024-11-21T23:04:51.417920Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T23:04:51.417942Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T23:04:51.417961Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:04:51.418598Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:04:51.418968Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T23:04:51.418987Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:04:51.419008Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2024-11-21T23:04:51.419315Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T23:04:51.419387Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T23:04:51.423086Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:979:2774], Recipient [2:639:2541]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2024-11-21T23:04:51.423130Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:46.734283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2024-11-21T23:04:46.736759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:46.737221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:46.737779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:46.738020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:46.738295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:46.738866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:46.743579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:47.208690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:47.208944Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:47.293682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:47.303679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:47.303864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:47.329830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:47.332267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:47.335055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:47.335814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:47.363171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:47.368608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:47.368649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:47.370028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:47.370262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:47.371007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:47.372503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:47.415576Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:48.148264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2024-11-21T23:04:48.148498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.148734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:48.148949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:48.149022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.159685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:48.159854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:48.160060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.160111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:48.160181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:48.160212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:48.161987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.162047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:48.162081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:48.167018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.167068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.167102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:48.167245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:48.184307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:48.191245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:48.191523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:48.193812Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:48.194033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:48.194113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:48.194479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:48.194541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:48.194747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:48.194861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:48.201501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:48.201560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:48.201743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:48.201785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:48.202112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.202157Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:48.202249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:48.202284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:48.202324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:48.202362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:48.202436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:48.202471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:48.202559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:48.202598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:48.202628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:48.204399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:48.204487Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:48.204522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:48.204732Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:48.204767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:48.204992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... olution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:49.764283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:749:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:752:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:753:2058] recipient: [1:751:2665] Leader for TabletID 72057594046678944 is [1:754:2666] sender: [1:755:2058] recipient: [1:751:2665] 2024-11-21T23:04:49.943617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:49.943727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:49.943769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:49.943820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:49.943852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:49.943881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:49.943940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:49.944231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:49.968920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:49.970141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:49.970331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:49.970657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:49.970690Z node 1 
:IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:49.970793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:49.971550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2024-11-21T23:04:49.971644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:04:49.971679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:04:49.971731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.971819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.972120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:04:49.972365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.972789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.972903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.973018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2024-11-21T23:04:49.973063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:04:49.973090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:04:49.973109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T23:04:49.973126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:04:49.973299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.973373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.973584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2024-11-21T23:04:49.973782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:04:49.974102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.974216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.974625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 
2024-11-21T23:04:49.974688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.974910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.975995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:49.981576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:49.981640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:49.982372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:49.982439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:49.982489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:49.983980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:754:2666] sender: [1:812:2058] recipient: [1:15:2062] 2024-11-21T23:04:50.070806Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:04:50.073469Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 2.1ms result status StatusSuccess 2024-11-21T23:04:50.083101Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" 
PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCmsTest::RequestRestartServicesRejectSecond >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:42.939209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:42.939436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:42.940076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:42.940241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:42.940411Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:42.940564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:42.940940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:42.944860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:43.435578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:43.435893Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:43.492124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:43.513861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:43.514031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:43.576625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:43.587385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:43.590823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:43.591532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:04:43.632161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:43.641404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:43.641596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:43.646616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:43.646943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:43.647412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:43.648226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:43.668705Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:44.251104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:44.252825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:44.254265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:44.260586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:44.261206Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:44.282481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:44.284338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:44.285604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:44.290472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:44.290741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:44.290893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:44.315205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:44.315702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:44.315960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:04:44.345344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:44.345403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:44.345677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:44.349721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:44.390362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:44.408189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:44.408768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:44.414519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:44.414718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:44.414773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
1:0, at tablet 72057594046678944 2024-11-21T23:04:44.415201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:44.415256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:44.415430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:44.415516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:44.425132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:44.425262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:44.425871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:44.425976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:44.427102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:44.427330Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:44.427696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:44.427732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:44.427781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:44.427841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:44.427892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:44.427919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:44.427997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:44.428033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:44.428062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:44.429668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:44.438472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:44.438561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:44.438609Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:44.438652Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:44.438768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... EvProposeTransactionResult> complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:51.599801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:51.599836Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:51.600425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2024-11-21T23:04:51.601606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2024-11-21T23:04:51.618816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2024-11-21T23:04:51.618893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2024-11-21T23:04:51.618960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2024-11-21T23:04:51.619178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T23:04:51.619305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2024-11-21T23:04:51.619359Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2024-11-21T23:04:51.619394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2024-11-21T23:04:51.619433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T23:04:51.630892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T23:04:51.631123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T23:04:51.631159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T23:04:51.631477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:51.631512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T23:04:51.631543Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is 
registered, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:51.690352Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:51.699072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 378 RawX2: 8589936941 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:51.702734Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2024-11-21T23:04:51.703051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2024-11-21T23:04:51.924203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2024-11-21T23:04:51.925166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T23:04:51.925226Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2024-11-21T23:04:51.925541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:51.925845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T23:04:51.928219Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T23:04:51.929914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:51.940246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:51.943470Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:51.943519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:04:51.944830Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:51.945165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T23:04:51.950803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:51.951135Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T23:04:51.952404Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T23:04:51.952933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:04:51.953301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T23:04:51.953615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:04:51.953943Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T23:04:51.958640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T23:04:51.959369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:04:51.959710Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2024-11-21T23:04:51.960009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T23:04:51.966818Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:04:51.967432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:04:51.967969Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T23:04:51.968230Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:04:51.968492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:51.968826Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2024-11-21T23:04:51.969620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:409:2378] 2024-11-21T23:04:51.976522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T23:04:51.978663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:04:51.978934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:683:2609] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2024-11-21T23:04:52.020196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:52.021710Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2024-11-21T23:04:52.022725Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 
72057594046678944 2024-11-21T23:04:52.033924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:52.039181Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:04:52.041085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T23:04:52.041842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T23:04:52.046874Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T23:04:52.046998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T23:04:52.047036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:776:2690] TestWaitNotification: OK eventTxId 106 >> TCmsTest::ManagePermissions >> GivenIdRange::Points [GOOD] >> GivenIdRange::Runs >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> GivenIdRange::Runs [GOOD] >> GivenIdRange::Allocate |78.9%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} >> GivenIdRange::Allocate [GOOD] >> TCmsTest::WalleTasks >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::StateStorageTwoRings >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageTwoBrokenRings >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::PermissionDuration |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |78.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> TCmsTest::RequestRestartServicesMultipleNodes >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTest::ActionWithZeroDuration |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blob_depot/ut/unittest >> GivenIdRange::Allocate [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] >> TYardTest::TestRestartAtChunkEnd [GOOD] >> TYardTestRestore::TestRestore15 >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> TCmsTest::RequestRestartServicesOk >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:04:48.606727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:04:48.606819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:48.606873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:04:48.606908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:04:48.606958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:04:48.606985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:04:48.607040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:04:48.607334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:48.720702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:04:48.720757Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:48.737904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:48.741577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:04:48.741759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:04:48.747893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:04:48.748466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:48.749046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:48.749346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2024-11-21T23:04:48.753164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:48.754268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:48.754323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:48.754569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:04:48.754611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:48.754659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:04:48.754760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.760149Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:04:48.937065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:04:48.937326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.937562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:04:48.937790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:04:48.937853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.951285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:48.951453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:04:48.951671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.951741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:04:48.951778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:04:48.951827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:04:48.959213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.959274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:04:48.959309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 3 -> 128 2024-11-21T23:04:48.964182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.964241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:04:48.964302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:48.964371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:04:48.968200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:48.984295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:04:48.984543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:04:48.985501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:04:48.985653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:48.985699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:48.985998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:04:48.986075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:04:48.986253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:48.986348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:04:48.993543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:48.993626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:04:48.993805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:48.993842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:04:48.994197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T23:04:48.994246Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:04:48.994344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:04:48.994379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:48.994443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:04:48.994497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:04:48.994532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:04:48.994559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:04:48.994637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:04:48.994680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:04:48.994711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:04:48.996693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:48.996812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:04:48.996846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:04:48.996900Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:04:48.996935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:04:48.997074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
t TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T23:04:56.625758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T23:04:56.626587Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2024-11-21T23:04:56.632342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:56.643205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:56.643490Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:04:56.643516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:04:56.643728Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:04:56.643753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T23:04:56.644067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:04:56.644370Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T23:04:56.645479Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T23:04:56.645818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:04:56.646362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2024-11-21T23:04:56.648645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:04:56.649251Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T23:04:56.649543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T23:04:56.650218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:04:56.650521Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2024-11-21T23:04:56.650780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T23:04:56.652093Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:04:56.652149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:04:56.654885Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2024-11-21T23:04:56.655181Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:04:56.655473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:04:56.657811Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2024-11-21T23:04:56.672297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T23:04:56.726655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T23:04:56.726953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T23:04:56.730255Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:04:56.730955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:04:56.731255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:771:2684] TestWaitNotification: OK eventTxId 105 2024-11-21T23:04:57.367180Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:57.369025Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 2.11ms result status StatusSuccess 2024-11-21T23:04:57.380702Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { 
FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:04:57.459052Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:04:57.479186Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 20.1ms result status StatusSuccess 2024-11-21T23:04:57.481695Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: 
"\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> TYardTestRestore::TestRestore15 [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest |78.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> TCmsTenatsTest::TestClusterLimit >> TCmsTest::TestKeepAvailableMode >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTestRestore::TestRestore15 [GOOD] >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTest::StateStorageLockedNodes >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> TMaintenanceApiTest::ActionReason [GOOD] >> TCmsTest::TestForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock [GOOD] >> TPDiskRaces::OwnerRecreationRaces >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead >> TCmsTest::StateRequest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest2 >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |78.9%| [TM] {RESULT} ydb/core/blob_depot/ut/unittest |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning >> TCmsTest::PriorityRange [GOOD] |79.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> TCmsTest::StateStorageLockedNodes [GOOD] >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplaceDevices |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD] Test command err: 2024-11-21T23:05:10.559606Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2024-11-21T23:05:10.559938Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2024-11-21T23:05:10.559958Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2024-11-21T23:05:10.560285Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2024-11-21T23:05:10.560308Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2024-11-21T23:05:10.560324Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2024-11-21T23:05:10.560338Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2024-11-21T23:05:10.560353Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2024-11-21T23:05:10.616472Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2024-11-21T23:05:10.616792Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2024-11-21T23:05:10.617485Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2024-11-21T23:05:10.617505Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2024-11-21T23:05:10.617521Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2024-11-21T23:05:10.617537Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2024-11-21T23:05:10.617556Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2024-11-21T23:05:10.617574Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2024-11-21T23:05:10.684831Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2024-11-21T23:05:10.684884Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2024-11-21T23:05:10.684906Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2024-11-21T23:05:10.684926Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2024-11-21T23:05:10.684947Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2024-11-21T23:05:10.684971Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2024-11-21T23:05:10.684991Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2024-11-21T23:05:10.685011Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] |79.0%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] >> test.py::test[solomon-BrokenJsonResponse-] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TraverseDatashard::TraverseTwoTables [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestKeepAvailableModeScheduled |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest2 [GOOD] >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2024-11-21T23:04:45.912668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:04:45.914371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:45.919153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002509/r3tmp/tmpfTMKla/pdisk_1.dat 2024-11-21T23:04:47.971490Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3758, node 1 2024-11-21T23:04:48.838313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:48.838646Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:04:48.838760Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:48.840297Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:04:48.894642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:04:49.033645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:49.033781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:49.049411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13481 2024-11-21T23:04:51.000212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:05:03.632053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:03.632152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:03.697766Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:05:03.780746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:04.983496Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:05.676350Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:05:05.677262Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:05:05.717828Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:05:05.718876Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:05:05.719086Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:05:05.719149Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:05:05.719217Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:05:05.719313Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:05:05.719365Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:05:05.719410Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:05:05.719873Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:05:06.108148Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:05:06.109065Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:05:06.158292Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T23:05:06.302626Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T23:05:06.303058Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T23:05:06.315419Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T23:05:06.483335Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:05:06.483399Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:05:06.483475Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T23:05:06.541422Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:06.542078Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:06.607422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:05:06.694381Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:05:06.696507Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:05:06.872466Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:05:06.901185Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:06.933951Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:05:07.934031Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:05:08.190973Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:05:10.023916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:10.024033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:10.038906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T23:05:11.329468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2433:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:11.329612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:11.330931Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2438:3075]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:11.331108Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:05:11.331199Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2440:3077] 2024-11-21T23:05:11.331263Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2440:3077] 2024-11-21T23:05:11.331805Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2441:2945] 2024-11-21T23:05:11.332041Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2440:3077], server id = [2:2441:2945], tablet id = 72075186224037897, status = OK 2024-11-21T23:05:11.332188Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2441:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T23:05:11.332259Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T23:05:11.332458Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:05:11.332524Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2438:3075], StatRequests.size() = 1 2024-11-21T23:05:11.349129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:11.349244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:11.349571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2450:3086], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:11.355728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T23:05:11.547460Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T23:05:11.547733Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T23:05:11.634717Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2440:3077], schemeshard count = 1 2024-11-21T23:05:12.051174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2452:3088], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T23:05:12.470226Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2592:3177]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:12.478584Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:05:12.478642Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2592:3177], StatRequests.size() = 1 2024-11-21T23:05:12.864787Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fhvcpbzqre74vgaz7hdy8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhlNDRiZTUtYzc3YjIzZGMtN2JhZGYzZGEtZWM0NGI0Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:05:13.278465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037889 2024-11-21T23:05:13.840048Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2924:3242]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:13.840231Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T23:05:13.840280Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2924:3242], StatRequests.size() = 1 2024-11-21T23:05:13.864254Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:2933:3251]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:13.864434Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2024-11-21T23:05:13.864467Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:2933:3251], StatRequests.size() = 1 2024-11-21T23:05:13.963553Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fhxty6fp8ny8rnd6z2253, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZhODBlYWItYTMxZGE5ZjQtN2NjOTkzNTMtNTE5ZTUzOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:05:14.195362Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2980:3204]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:14.216824Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:05:14.216888Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T23:05:14.217239Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:05:14.217287Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T23:05:14.217338Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T23:05:14.252908Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T23:05:14.255808Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2024-11-21T23:05:14.257539Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3005:3217]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:14.270298Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:05:14.270360Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T23:05:14.290286Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:05:14.290353Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T23:05:14.290411Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T23:05:14.292393Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2024-11-21T23:05:14.292636Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes >> TGroupMapperTest::Mirror3dc |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageRollingRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2024-11-21T23:05:21.810082Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T23:05:21.815278Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T23:05:21.815688Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T23:05:21.820961Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: 
"storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120030000 } Devices { 
Name: "pdisk-31-31" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120030000 } } 2024-11-21T23:05:21.848714Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts 
{ Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120030000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120030000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120030000 } Timestamp: 120030000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120030000 } 2024-11-21T23:05:21.850367Z node 25 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.004000s 2024-11-21T23:05:21.852533Z node 25 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2024-11-21T23:05:21.852786Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2024-11-21T23:05:21.853150Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2024-11-21T23:05:21.853486Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 25 has not yet been completed) 2024-11-21T23:05:21.854755Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T23:05:21.856906Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T23:05:21.857501Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 25, marker# MARKER_DISK_FAULTY 2024-11-21T23:05:21.859795Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2024-11-21T23:05:21.860106Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2024-11-21T23:05:21.860135Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2024-11-21T23:05:21.860161Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: 
nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2024-11-21T23:05:21.860419Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2024-11-21T23:05:21.860450Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2024-11-21T23:05:21.860477Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2024-11-21T23:05:21.860764Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2024-11-21T23:05:21.881978Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: ... pdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2024-11-21T23:05:22.207967Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2024-11-21T23:05:22.208201Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2024-11-21T23:05:22.208231Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2024-11-21T23:05:22.208681Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2024-11-21T23:05:22.210828Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T23:05:22.214030Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T23:05:22.216537Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T23:05:22.216643Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T23:05:22.217237Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T23:05:22.217298Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T23:05:22.217583Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo 
{ PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T23:05:22.217637Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180030 2024-11-21T23:05:22.217928Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2024-11-21T23:05:22.222770Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2024-11-21T23:05:22.222849Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2024-11-21T23:05:22.222891Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2024-11-21T23:05:22.223054Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2024-11-21T23:05:22.223281Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2024-11-21T23:05:22.223377Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2024-11-21T23:05:22.223417Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2024-11-21T23:05:22.223448Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2024-11-21T23:05:22.256796Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2024-11-21T23:05:22.257134Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2024-11-21T23:05:22.313087Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T23:05:22.313712Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T23:05:22.313773Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2024-11-21T23:05:22.334048Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T23:05:22.343106Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2024-11-21T23:05:22.343809Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2024-11-21T23:05:22.344091Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2024-11-21T23:05:22.344341Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2024-11-21T23:05:22.344362Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2024-11-21T23:05:22.344386Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T23:05:22.345814Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2024-11-21T23:05:22.354807Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2024-11-21T23:05:22.362485Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T23:05:22.363808Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.130000Z, action# Type: RESTART_SERVICES Host: "25" Services: 
"storage" Duration: 600000000 2024-11-21T23:05:22.364780Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T23:05:22.384904Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T23:05:22.387007Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2024-11-21T23:05:22.387607Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.130000Z 2024-11-21T23:05:22.488210Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2024-11-21T23:05:22.490747Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T23:05:22.491092Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T23:05:22.491408Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2024-11-21T23:05:22.495228Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T23:05:22.496326Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2024-11-21T23:05:22.497216Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2024-11-21T23:05:22.497573Z node 25 :CMS DEBUG: Result: ALLOW 2024-11-21T23:05:22.516104Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2024-11-21T23:05:22.516784Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2024-11-21T23:05:22.517681Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T23:05:22.525254Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.231512Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2024-11-21T23:05:22.526911Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T23:05:22.568817Z node 25 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T23:05:22.574148Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action 
{ Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780231512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2024-11-21T23:05:22.577708Z node 25 :CMS INFO: User user is done with permissions user-p-1 2024-11-21T23:05:22.578043Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T23:05:22.578875Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T23:05:22.579469Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2024-11-21T23:05:22.580514Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2024-11-21T23:05:22.581045Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2024-11-21T23:05:22.625577Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T23:05:22.628670Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2024-11-21T23:05:22.633096Z node 25 :CMS INFO: User user is done with permissions user-p-2 2024-11-21T23:05:22.633339Z node 25 :CMS DEBUG: Resulting status: OK 2024-11-21T23:05:22.633954Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2024-11-21T23:05:22.634500Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2024-11-21T23:05:22.635413Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2024-11-21T23:05:22.636002Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2024-11-21T23:05:22.668788Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2024-11-21T23:05:22.669010Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> TGroupMapperTest::Block42_1disk >> TGroupMapperTest::Mirror3dc [GOOD] >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |79.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestKeepAvailableModeScheduled 
[GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TPDiskRaces::OwnerRecreationRaces [GOOD] >> TPDiskTest::CommitDeleteChunks |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TPDiskTest::CommitDeleteChunks [GOOD] >> TPDiskTest::DeviceHaltTooLong |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> TCmsTest::VDisksEviction [GOOD] >> LocalTableWriter::ConsistentWrite >> LocalTableWriter::WriteTable >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestListShards1Shard |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2024-11-21T23:05:31.018185Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2024-11-21T23:05:31.028407Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2024-11-21T23:05:31.029149Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2024-11-21T23:05:31.033447Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 18 InterconnectPort: 
12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" 
State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028512 } } 2024-11-21T23:05:31.052088Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: 
"5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028512 } Timestamp: 120028512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028512 } 2024-11-21T23:05:31.053023Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.004512s 2024-11-21T23:05:31.053269Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2024-11-21T23:05:31.059073Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2024-11-21T23:05:31.060028Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2024-11-21T23:05:31.060393Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2024-11-21T23:05:31.062174Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T23:05:31.063582Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T23:05:31.064217Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2024-11-21T23:05:31.065556Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2024-11-21T23:05:31.065943Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2024-11-21T23:05:31.065969Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request 
pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2024-11-21T23:05:31.065995Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2024-11-21T23:05:31.066019Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2024-11-2 ... ices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120541048 } } 2024-11-21T23:05:32.085911Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 
120541048 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120541048 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120541048 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120541048 } Timestamp: 120541048 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120541048 } 2024-11-21T23:05:32.086192Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2024-11-21T23:05:32.086259Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2024-11-21T23:05:32.086309Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2024-11-21T23:05:32.087374Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2024-11-21T23:05:32.087612Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2024-11-21T23:05:32.087665Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2024-11-21T23:05:32.087908Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2024-11-21T23:05:32.087964Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2024-11-21T23:05:32.088055Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request 
pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2024-11-21T23:05:32.088099Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2024-11-21T23:05:32.088138Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2024-11-21T23:05:32.088172Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2024-11-21T23:05:32.088196Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2024-11-21T23:05:32.088219Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2024-11-21T23:05:32.088239Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2024-11-21T23:05:32.088265Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2024-11-21T23:05:32.088447Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2024-11-21T23:05:32.088952Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2024-11-21T23:05:32.089066Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2024-11-21T23:05:32.089151Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2024-11-21T23:05:32.089214Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2024-11-21T23:05:32.089273Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2024-11-21T23:05:32.089324Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2024-11-21T23:05:32.089372Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: 
nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120442560 ChangeTime: 120442560 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120541 2024-11-21T23:05:32.089416Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2024-11-21T23:05:32.114794Z node 18 :CMS DEBUG: TTxStorePermissions complete 2024-11-21T23:05:32.115082Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2024-11-21T23:05:32.115747Z node 18 :CMS INFO: User user removes request user-r-3 2024-11-21T23:05:32.115804Z node 18 :CMS DEBUG: Resulting status: OK 2024-11-21T23:05:32.115875Z node 18 :CMS DEBUG: TTxRemoveRequest Execute 2024-11-21T23:05:32.115932Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18 2024-11-21T23:05:32.116094Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2024-11-21T23:05:32.138755Z node 18 :CMS DEBUG: TTxRemoveRequest Complete 2024-11-21T23:05:32.139413Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> TCmsTest::TestLogOperationsRollback [GOOD] >> TPersQueueTest::DirectReadPreCached [GOOD] >> TPersQueueTest::DirectReadNotCached |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> LocalTableWriter::SupportedTypes |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TGroupMapperTest::NonUniformCluster2 |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> Mirror3of4::ReplicationHuge [GOOD] >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> KqpScan::ScanRetryReadRanges [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |79.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2024-11-21T23:05:00.254217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:05:00.256099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:00.256731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002756/r3tmp/tmp2UZhib/pdisk_1.dat 2024-11-21T23:05:03.637875Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17038, node 1 2024-11-21T23:05:05.726152Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:05:05.726243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:05:05.726283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:05:05.726806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:05:05.795499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:05:05.976682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:05.977365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:06.067616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28223 2024-11-21T23:05:09.548267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:05:23.698018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:23.698106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:23.755672Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:05:23.761164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:24.550924Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:25.057201Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:05:25.058019Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:05:25.276977Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:05:25.278205Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:05:25.293213Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:05:25.293324Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:05:25.293386Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:05:25.293454Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:05:25.293508Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:05:25.293594Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:05:25.294138Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:05:25.703147Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:05:25.703939Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:05:25.740223Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T23:05:25.816057Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T23:05:25.816509Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T23:05:25.823658Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T23:05:26.016707Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:05:26.016785Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:05:26.016851Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T23:05:26.175545Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:26.175638Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:26.218309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:05:26.266594Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:05:26.267258Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:05:26.349617Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:05:26.388987Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:26.446825Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:05:27.776718Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:05:28.016253Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T23:05:29.166093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:05:30.900626Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:31.732619Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T23:05:31.732703Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T23:05:31.732838Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2484:2901], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T23:05:31.743394Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2493:2906] 2024-11-21T23:05:31.744334Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2493:2906], schemeshard id = 72075186224037899 2024-11-21T23:05:33.887405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2611:3188], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:33.887777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:33.969028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2024-11-21T23:05:34.403098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2907:3235], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:34.403256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:34.417734Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2912:3239]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:34.417949Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:05:34.418088Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T23:05:34.418152Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2915:3242] 2024-11-21T23:05:34.418219Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2915:3242] 2024-11-21T23:05:34.418877Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2916:3133] 2024-11-21T23:05:34.427560Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2915:3242], server id = [2:2916:3133], tablet id = 72075186224037897, status = OK 2024-11-21T23:05:34.427817Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2916:3133], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T23:05:34.427914Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T23:05:34.428201Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:05:34.428274Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2912:3239], StatRequests.size() = 1 2024-11-21T23:05:34.446678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2920:3246], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:34.446829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:34.447236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2925:3251], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:34.458174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2024-11-21T23:05:34.650878Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T23:05:34.650973Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T23:05:34.718813Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2915:3242], schemeshard count = 1 2024-11-21T23:05:35.213703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2927:3253], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2024-11-21T23:05:35.625268Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3073:3338]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:35.625459Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:05:35.625496Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3073:3338], StatRequests.size() = 1 2024-11-21T23:05:35.963950Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fjhy1cw3jkxzv1rxpfgq0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM0MGRiYjgtZTY5MTI2LTdmOTZlOWU1LWU2ZTMyODIw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:05:36.156042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899 2024-11-21T23:05:37.486927Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3388:3400]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:37.487162Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T23:05:37.487207Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3388:3400], StatRequests.size() = 1 2024-11-21T23:05:37.527144Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3397:3409]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:37.527396Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2024-11-21T23:05:37.527433Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3397:3409], StatRequests.size() = 1 2024-11-21T23:05:37.670861Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd8fjms9ebbk3v5npgf3t0ye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhhNWEyYTEtYjE3NzMyZTgtODBiY2M4YmYtNDBiYjFlMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:05:37.773401Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3435:3380]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:37.777349Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:05:37.777421Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T23:05:37.777797Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:05:37.777851Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T23:05:37.777899Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T23:05:37.789266Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2024-11-21T23:05:37.789526Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2024-11-21T23:05:37.789825Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3460:3393]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T23:05:37.792553Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:05:37.792612Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T23:05:37.792964Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:05:37.793017Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2024-11-21T23:05:37.793064Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T23:05:37.812419Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2024-11-21T23:05:37.812690Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TBackupCollectionTests::HiddenByFeatureFlag >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2024-11-21T23:04:47.812697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:04:47.993924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:04:47.997904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:47.998833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:04:48.010153Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:04:48.018591Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002886/r3tmp/tmpJ4Uul2/pdisk_1.dat 2024-11-21T23:04:48.977164Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:49.395100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:04:49.807940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:49.808806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:49.893089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:49.893178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:49.921180Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:04:49.931231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:49.931647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:50.637252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.259881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1328:2797], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:04:53.261008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1339:2802], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:04:53.261311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:04:53.294845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:04:53.916724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1342:2805], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:04:56.902692Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fh9r1ey2n28160fvw751f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBmZmYwYjMtMTdmM2NmNGItZTZlYjI5MTEtZjY4ZjlhOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2024-11-21T23:04:59.629878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fhdg34f74kb6krhtr1ft6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFkNDgyYWQtMTc1NTNhMWEtNGZjMzQ0MDktMzE4MDRhNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1545:2928] -> [2:1501:2402] -- EvScanData from [2:1550:2409]: pass 2024-11-21T23:05:03.608705Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fhdg34f74kb6krhtr1ft6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFkNDgyYWQtMTc1NTNhMWEtNGZjMzQ0MDktMzE4MDRhNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2024-11-21T23:05:03.614676Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2024-11-21T23:05:34.184760Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:05:34.216015Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:05:34.216354Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:05:34.216836Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:34.218138Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:05:34.218363Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002886/r3tmp/tmpla2Hrq/pdisk_1.dat 2024-11-21T23:05:34.616567Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:35.002243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:05:35.289267Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:35.290256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:35.335286Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:35.335392Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:35.393565Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T23:05:35.397679Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:35.400961Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:35.950289Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:05:37.176294Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1327:2799], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:37.176657Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1338:2804], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:37.179921Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:37.209844Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:05:38.236864Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1341:2807], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:05:41.248545Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fjmmaafdr3hmg9wyfrzj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmEyN2Y5NGQtY2UzYzg2ZjktYzc1ZTcwMmUtMjc4YzM1MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 4 2024-11-21T23:05:43.343038Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fjrrp2102wqdxxswe0gnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTdlYzYyOGQtZjc1ZWFhNWItZGVkZjRhNWItYTlmM2E1YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [3:1544:2930] -> [4:1498:2402] -- EvScanData from [4:1548:2409]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2} 2024-11-21T23:05:43.463778Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_mirror3of4/unittest >> Mirror3of4::ReplicationHuge [GOOD] Test command err: 2024-11-21T23:03:29.848199Z 1 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:0:0]: SKELETON START Marker# BSVS37 2024-11-21T23:03:29.849687Z 2 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:1:0]: SKELETON START Marker# BSVS37 2024-11-21T23:03:29.850596Z 3 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:2:0]: SKELETON START Marker# BSVS37 2024-11-21T23:03:29.851551Z 4 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:3:0]: SKELETON START Marker# BSVS37 2024-11-21T23:03:29.852407Z 5 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:4:0]: SKELETON START Marker# BSVS37 2024-11-21T23:03:29.853457Z 6 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:5:0]: SKELETON START Marker# BSVS37 2024-11-21T23:03:29.854465Z 7 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:6:0]: SKELETON START Marker# BSVS37 2024-11-21T23:03:29.855232Z 8 
00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:7:0]: SKELETON START Marker# BSVS37 2024-11-21T23:03:29.857337Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:0:0]: LocalRecovery START 2024-11-21T23:03:29.857661Z 1 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:0:0]: Sending TEvYardInit: pdiskGuid# 1883901235851768757 skeletonid# [1:139:13] selfid# [1:155:22] delay 0.000000 sec 2024-11-21T23:03:29.857898Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:1:0]: LocalRecovery START 2024-11-21T23:03:29.858338Z 2 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:1:0]: Sending TEvYardInit: pdiskGuid# 11876262465046372230 skeletonid# [2:140:11] selfid# [2:156:12] delay 0.000000 sec 2024-11-21T23:03:29.858554Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:2:0]: LocalRecovery START 2024-11-21T23:03:29.858589Z 3 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:2:0]: Sending TEvYardInit: pdiskGuid# 17696067057789205427 skeletonid# [3:141:11] selfid# [3:157:12] delay 0.000000 sec 2024-11-21T23:03:29.858812Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:3:0]: LocalRecovery START 2024-11-21T23:03:29.859091Z 4 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:3:0]: Sending TEvYardInit: pdiskGuid# 11927131559300230679 skeletonid# [4:142:11] selfid# [4:158:12] delay 0.000000 sec 2024-11-21T23:03:29.859296Z 5 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:4:0]: LocalRecovery START 2024-11-21T23:03:29.859510Z 5 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:4:0]: Sending TEvYardInit: pdiskGuid# 96398183946875567 skeletonid# [5:143:11] selfid# [5:159:12] delay 0.000000 sec 2024-11-21T23:03:29.859775Z 6 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:5:0]: LocalRecovery START 2024-11-21T23:03:29.859806Z 6 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:5:0]: Sending TEvYardInit: pdiskGuid# 2270517133203737690 skeletonid# [6:144:11] selfid# [6:160:12] delay 0.000000 sec 2024-11-21T23:03:29.859981Z 7 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:6:0]: LocalRecovery START 2024-11-21T23:03:29.860197Z 7 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:6:0]: Sending TEvYardInit: pdiskGuid# 1563926492494346897 skeletonid# [7:145:11] selfid# [7:161:12] delay 0.000000 sec 2024-11-21T23:03:29.860399Z 8 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:7:0]: LocalRecovery START 2024-11-21T23:03:29.860507Z 8 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:7:0]: Sending TEvYardInit: pdiskGuid# 811365447221266436 skeletonid# [8:146:11] selfid# [8:162:12] delay 0.000000 sec 2024-11-21T23:03:29.867045Z 1 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:402} PDiskMock[1:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:0:0] PDiskGuid# 1883901235851768757 CutLogID# [1:139:13] WhiteboardProxyId# [1:122:10]} 2024-11-21T23:03:29.869926Z 1 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:437} PDiskMock[1:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2024-11-21T23:03:29.875031Z 2 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:402} PDiskMock[2:1] received TEvYardInit Msg# 
{EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:1:0] PDiskGuid# 11876262465046372230 CutLogID# [2:140:11] WhiteboardProxyId# [2:124:10]} 2024-11-21T23:03:29.875270Z 2 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:437} PDiskMock[2:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2024-11-21T23:03:29.875601Z 3 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:402} PDiskMock[3:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:2:0] PDiskGuid# 17696067057789205427 CutLogID# [3:141:11] WhiteboardProxyId# [3:126:10]} 2024-11-21T23:03:29.875738Z 3 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:437} PDiskMock[3:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2024-11-21T23:03:29.875951Z 4 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:402} PDiskMock[4:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:3:0] PDiskGuid# 11927131559300230679 CutLogID# [4:142:11] WhiteboardProxyId# [4:128:10]} 2024-11-21T23:03:29.876108Z 4 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:437} PDiskMock[4:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2024-11-21T23:03:29.876550Z 5 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:402} PDiskMock[5:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:4:0] PDiskGuid# 96398183946875567 CutLogID# [5:143:11] WhiteboardProxyId# [5:130:10]} 2024-11-21T23:03:29.876991Z 5 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:437} PDiskMock[5:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2024-11-21T23:03:29.877042Z 6 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:402} PDiskMock[6:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:5:0] PDiskGuid# 2270517133203737690 CutLogID# [6:144:11] WhiteboardProxyId# [6:132:10]} 2024-11-21T23:03:29.877078Z 6 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:437} PDiskMock[6:1] sending TEvYardInitResult Msg# 
{EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2024-11-21T23:03:29.877553Z 7 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:402} PDiskMock[7:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:6:0] PDiskGuid# 1563926492494346897 CutLogID# [7:145:11] WhiteboardProxyId# [7:134:10]} 2024-11-21T23:03:29.877683Z 7 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:437} PDiskMock[7:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2024-11-21T23:03:29.878085Z 8 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:402} PDiskMock[8:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:7:0] PDiskGuid# 811365447221266436 CutLogID# [8:146:11] WhiteboardProxyId# [8:136:10]} 2024-11-21T23:03:29.878306Z 8 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:437} PDiskMock[8:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2024-11-21T23:03:29.884253Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:0:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2024-11-21T23:03:29.888936Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:1:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2024-11-21T23:03:29.890373Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:2:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2024-11-21T23:03:29.892323Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:3:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2024-11-21T23:03:29.896107Z 5 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:4:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2024-11-21T23:03:29.898563Z 6 00h00m00.000000s :BS_LOCALRECOVERY 
NOTICE: VDISK[0:_:0:5:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2024-11-21T23:03:29.905093Z 7 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:6:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2024-11-21T23:03 ... 0}{Lsn# 23 Cookie# 11}{Lsn# 24 Cookie# 12}} Recipient# [8:146:11] 2024-11-21T23:05:43.678107Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:525} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 583 Lsn# 25 LsnSegmentStart# 25 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# []}} VDiskId# [0:4294967295:0:6:0] 2024-11-21T23:05:43.678142Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:587} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None{Lsn# 25 Cookie# 0}} Recipient# [7:345:29] 2024-11-21T23:05:43.678308Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:525} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 582 Lsn# 25 LsnSegmentStart# 25 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# []}} VDiskId# [0:4294967295:0:7:0] 2024-11-21T23:05:43.678343Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:587} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None{Lsn# 25 Cookie# 0}} Recipient# [8:355:29] 2024-11-21T23:05:43.811363Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:525} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 583 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# []}} VDiskId# [0:4294967295:0:6:0] 2024-11-21T23:05:43.811452Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:587} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None{Lsn# 26 Cookie# 0}} Recipient# [7:345:29] 2024-11-21T23:05:43.811563Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:525} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 582 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# []}} VDiskId# [0:4294967295:0:7:0] 2024-11-21T23:05:43.811613Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:587} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None{Lsn# 26 Cookie# 0}} Recipient# [8:355:29] 2024-11-21T23:05:43.811930Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:1:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65536} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2024-11-21T23:05:43.812274Z 7 
00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:525} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 583 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# []}} VDiskId# [0:4294967295:0:6:0] 2024-11-21T23:05:43.812330Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:587} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None{Lsn# 27 Cookie# 0}} Recipient# [7:345:29] 2024-11-21T23:05:43.812388Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:1:0]: GLUEREAD(0x50d000050190): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319909952} 2024-11-21T23:05:43.812454Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:525} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 582 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# []}} VDiskId# [0:4294967295:0:7:0] 2024-11-21T23:05:43.812500Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:587} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None{Lsn# 27 Cookie# 0}} Recipient# [8:355:29] 2024-11-21T23:05:43.812590Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:675} PDiskMock[2:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319909952} VDiskId# [0:4294967295:0:1:0] 2024-11-21T23:05:43.813879Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:715} PDiskMock[2:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 89335319909952 StatusFlags# None} 2024-11-21T23:05:43.814040Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:1:0]: GLUEREAD FINISHED(0x50d000050190): actualReadN# 1 origReadN# 1 2024-11-21T23:05:43.814406Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:1:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1369701526376808448} BlockedGeneration# 0} 2024-11-21T23:05:43.847356Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:2:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65536} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2024-11-21T23:05:43.848114Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:2:0]: GLUEREAD(0x50d0000b55b0): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319945792} 2024-11-21T23:05:43.848506Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:675} PDiskMock[3:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319945792} VDiskId# [0:4294967295:0:2:0] 2024-11-21T23:05:43.849390Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:715} PDiskMock[3:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# 
OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 89335319945792 StatusFlags# None} 2024-11-21T23:05:43.849537Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:2:0]: GLUEREAD FINISHED(0x50d0000b55b0): actualReadN# 1 origReadN# 1 2024-11-21T23:05:43.849641Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:2:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:1] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 2522623030983655424} BlockedGeneration# 0} 2024-11-21T23:05:43.851714Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:3:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65536} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2024-11-21T23:05:43.851914Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:3:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2024-11-21T23:05:43.852467Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:4:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65536} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2024-11-21T23:05:43.852595Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:4:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2024-11-21T23:05:43.853057Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:5:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65536} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2024-11-21T23:05:43.853211Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:5:0]: GLUEREAD(0x50d0000bd610): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319887936} 2024-11-21T23:05:43.853278Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:675} PDiskMock[6:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 89335319887936} VDiskId# [0:4294967295:0:5:0] 2024-11-21T23:05:43.854012Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:715} PDiskMock[6:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 89335319887936 StatusFlags# None} 2024-11-21T23:05:43.854066Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:5:0]: GLUEREAD FINISHED(0x50d0000bd610): actualReadN# 1 origReadN# 1 
2024-11-21T23:05:43.854153Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:5:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1946162278680231936} {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 1946162278680231936} BlockedGeneration# 0} 2024-11-21T23:05:43.887615Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:6:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65536} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2024-11-21T23:05:43.887883Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:6:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} 2024-11-21T23:05:43.888557Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:7:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65536} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2024-11-21T23:05:43.888696Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:7:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} >> DataStreams::TestUnsupported [GOOD] >> TBackupCollectionTests::CreateAbsolutePath |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2024-11-21T23:04:42.324437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872410478288707:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:42.324481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bfb/r3tmp/tmpThlrmX/pdisk_1.dat 2024-11-21T23:04:42.858167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:42.860527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:42.867884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:42.870547Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9617, node 1 2024-11-21T23:04:43.141030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:43.141050Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2024-11-21T23:04:43.141062Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:43.141148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:04:43.539833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:43.551290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:43.551366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:43.559187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:04:43.559387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:04:43.559398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:04:43.567183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:04:43.567212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:04:43.574007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:43.579141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:43.589526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230283637, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:43.589576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:43.589824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:04:43.591712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:43.591888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:43.591935Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:04:43.592013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:04:43.592049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:04:43.592095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:04:43.597116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:04:43.597164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:04:43.597186Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:04:43.597251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:04:43.673871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:43.674084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:43.674128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:43.674194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:04:43.674273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:04:43.674287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:04:43.679805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:04:43.679952Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:43.680219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:43.681289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:04:43.681332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:04:43.681346Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:04:43.681416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:20315 2024-11-21T23:04:43.854135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:43.854342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:43.854364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:43.857495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:04:43.857636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:04:43.863108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230283910, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:43.863153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230283910, at schemeshard: 72057594046644480 2024-11-21T23:04:43.863417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:04:43.863492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:04:43.863550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:04:43.866188Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:04:43.866427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:43.866966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:43.868489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:04:43.868531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:04:43.868545Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:04:43.868602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T23:04:43.903105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestUpdateStorage, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:04:43.903467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:43.906057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: user@builtin, status: Stat ... 
event=undelivered;self_id=[7:7439872615405216125:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:05:29.553196Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bfb/r3tmp/tmpicZOSa/pdisk_1.dat 2024-11-21T23:05:31.811572Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:31.904007Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:32.258555Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:32.258673Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:32.268730Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9386, node 7 2024-11-21T23:05:33.341996Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:05:33.342019Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:05:33.342213Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:05:33.342350Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:05:34.559660Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7439872615405216125:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:05:34.566616Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:62395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:05:36.419281Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:05:36.419642Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:05:36.419669Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:05:36.429550Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:05:36.429735Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:05:36.429749Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T23:05:36.432047Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:05:36.432074Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:05:36.433563Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:05:36.437577Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230336480, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:05:36.437623Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:05:36.437952Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:05:36.440274Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:05:36.440418Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:05:36.440462Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:05:36.440539Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:05:36.440571Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:05:36.440611Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T23:05:36.442435Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:05:36.442483Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:05:36.442503Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:05:36.442576Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T23:05:36.445170Z node 
7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:05:37.444439Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:05:37.450648Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:05:37.450674Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:05:37.450818Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T23:05:37.450881Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T23:05:37.450890Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T23:05:37.517453Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:05:37.517613Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:05:37.517839Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:05:37.528570Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:05:37.528965Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:05:37.534414Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:05:37.535160Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 TClient is connected to server localhost:62395 2024-11-21T23:05:41.857860Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:05:41.858028Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:05:41.858048Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:05:41.904403Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:05:41.905002Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:05:41.931715Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230341961, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:05:41.931766Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732230341961, at schemeshard: 72057594046644480 2024-11-21T23:05:41.932354Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T23:05:41.932435Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T23:05:41.932465Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 0 2024-11-21T23:05:41.939693Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T23:05:41.971363Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:05:41.971550Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:05:41.974093Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:05:41.974131Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:05:41.974151Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:05:41.974210Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 >> EvWrite::WriteWithSplit >> Normalizers::CleanEmptyPortionsNormalizer >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> Normalizers::SchemaVersionsNormalizer >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> KqpScan::ScanPg >> LocalTableWriter::WriteTable [GOOD] |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::Create [GOOD] >> LocalTableWriter::ConsistentWrite [GOOD] >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TBackupCollectionTests::CreateTwice >> TBackupCollectionTests::DisallowedPath >> TPersQueueTest::Init |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] >> TBackupCollectionTests::CreateTwice [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> TBackupCollectionTests::BackupAbsentCollection >> TBackupCollectionTests::DisallowedPath [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> TBackupCollectionTests::ParallelCreate |79.2%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2024-11-21T23:05:36.335654Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872645033152590:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:05:36.387588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001136/r3tmp/tmp42OLhP/pdisk_1.dat 2024-11-21T23:05:37.839077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:40.136332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:41.275817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:41.276215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:41.348919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:41.652982Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872645033152590:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:05:41.653481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:05:41.680574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:41.681674Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18738 TServer::EnableGrpc on GrpcPort 30245, node 1 2024-11-21T23:05:44.953664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:05:44.953689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:05:44.953849Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:05:44.954273Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18738 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:05:46.102069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:05:46.406800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732230347176 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2024-11-21T23:05:47.322781Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872692277793482:2368] Handshake: worker# [1:7439872687982826089:2304] 2024-11-21T23:05:47.323859Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872692277793482:2368] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:05:47.324599Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872692277793482:2368] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T23:05:47.339751Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872692277793482:2368] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 36b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 36b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T23:05:47.340518Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872692277793482:2368] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2024-11-21T23:05:47.341576Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872692277793486:2368] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T23:05:47.358876Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872692277793482:2368] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:05:47.359849Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872692277793486:2368] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T23:05:47.413665Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872692277793486:2368] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T23:05:47.413741Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872692277793482:2368] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:05:47.413793Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872692277793482:2368] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> TDatabaseResolverTests::PostgreSQL >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> DataStreams::TestPutRecordsCornerCases [GOOD] >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection |79.2%| [TM] {RESULT} ydb/core/blobstorage/ut_mirror3of4/unittest |79.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2024-11-21T23:05:53.042227Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:05:53.137602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:05:53.161232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:05:53.161316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:05:53.161563Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:05:53.169940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2024-11-21T23:05:53.170194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:53.170371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:53.170556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:53.170675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:53.170779Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:53.170933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:53.171039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:53.171141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:53.171265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:53.171375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.171496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:53.171597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:53.195234Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:53.198446Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:05:53.198721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2024-11-21T23:05:53.198783Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2024-11-21T23:05:53.199045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:53.199117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2024-11-21T23:05:53.199147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2024-11-21T23:05:53.199253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:53.199316Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:05:53.199359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:05:53.199398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2024-11-21T23:05:53.199481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:05:53.199561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:05:53.199609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:05:53.199649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2024-11-21T23:05:53.199855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:53.199914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:05:53.199965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:05:53.200006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2024-11-21T23:05:53.200101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:05:53.200155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:05:53.200190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:05:53.200214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:05:53.200273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:05:53.200371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:05:53.200402Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:05:53.200455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:05:53.200499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:05:53.200526Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:05:53.200680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2024-11-21T23:05:53.200765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2024-11-21T23:05:53.200859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2024-11-21T23:05:53.200926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2024-11-21T23:05:53.201078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:05:53.201128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:05:53.201157Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:05:53.201329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:05:53.201381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.201409Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.201553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:05:53.201593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;flin ... 
1;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T23:05:54.541762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.541810Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T23:05:54.541856Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:05:54.541966Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T23:05:54.542057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.542088Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T23:05:54.542113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:05:54.542296Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T23:05:54.542331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T23:05:54.542374Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=2; 2024-11-21T23:05:54.542452Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2024-11-21T23:05:54.542504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T23:05:54.542599Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.542629Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2024-11-21T23:05:54.542664Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:05:54.542829Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:05:54.542998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.543043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:05:54.543160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2024-11-21T23:05:54.543298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2024-11-21T23:05:54.543425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] send ScanData to [1:323:2330] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 2405760 rows: 20048 page faults: 0 finished: 0 pageFault: 0 arrow schema: key1: uint64 key2: uint64 field: string 2024-11-21T23:05:54.543579Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.543710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.543819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.544782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:05:54.544936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.545047Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:05:54.545085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] finished for tablet 9437184 2024-11-21T23:05:54.545167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:325:2331] send ScanData to [1:323:2330] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:05:54.545689Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:325:2331] and sent to [1:323:2330] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.07},{"events":["f_ack"],"t":0.071},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.073}],"full":{"a":1732230354471789,"name":"_full_task","f":1732230354471789,"d_finished":0,"c":0,"l":1732230354545225,"d":73436},"events":[{"name":"bootstrap","f":1732230354471968,"d_finished":2804,"c":1,"l":1732230354474772,"d":2804},{"a":1732230354544754,"name":"ack","f":1732230354542804,"d_finished":1041,"c":1,"l":1732230354543845,"d":1512},{"a":1732230354544735,"name":"processing","f":1732230354475792,"d_finished":61401,"c":9,"l":1732230354543848,"d":61891},{"name":"ProduceResults","f":1732230354473424,"d_finished":2834,"c":12,"l":1732230354545071,"d":2834},{"a":1732230354545074,"name":"Finish","f":1732230354545074,"d_finished":0,"c":0,"l":1732230354545225,"d":151},{"name":"task_result","f":1732230354475809,"d_finished":60233,"c":8,"l":1732230354542703,"d":60233}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2024-11-21T23:05:54.545793Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:05:54.471338Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=256192;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=256192;selected_rows=0; 2024-11-21T23:05:54.545848Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:05:54.545917Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T23:05:54.546061Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:325:2331];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2024-11-21T23:05:36.106096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872645711369802:2236];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:05:36.106535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001135/r3tmp/tmp2Cpuoi/pdisk_1.dat 2024-11-21T23:05:38.030518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:40.792829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:40.793513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:40.852627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:41.046541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:41.720575Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:41.729689Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872645711369802:2236];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:05:41.739165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:30541 TServer::EnableGrpc on GrpcPort 12117, node 1 2024-11-21T23:05:45.289264Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:05:45.289289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:05:45.289297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:05:45.289413Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:05:47.148030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:05:47.229582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732230348709 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) 2024-11-21T23:05:49.135084Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handshake: worker# [1:7439872692956010483:2310] 2024-11-21T23:05:49.136140Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:05:49.146957Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T23:05:49.148545Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 48b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T23:05:49.179359Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2024-11-21T23:05:49.180272Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 
},{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2024-11-21T23:05:49.181236Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872701545945178:2376] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T23:05:49.181433Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:05:49.191040Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872701545945178:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2024-11-21T23:05:49.231056Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872701545945178:2376] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T23:05:49.231125Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:05:49.231171Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2024-11-21T23:05:49.254994Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 4 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T23:05:49.263322Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 5 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T23:05:49.278897Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 7 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T23:05:49.280752Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2024-11-21T23:05:49.281730Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2024-11-21T23:05:49.283553Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872701545945178:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2024-11-21T23:05:49.303633Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872701545945178:2376] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T23:05:49.303689Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:05:49.304407Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2024-11-21T23:05:49.335288Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 9 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 49b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T23:05:49.335752Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2024-11-21T23:05:49.336239Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872701545945178:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2024-11-21T23:05:49.352224Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872701545945178:2376] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T23:05:49.367894Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:05:49.368331Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2024-11-21T23:05:49.378639Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872701545945175:2376] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 11 Data: 19b CreateTime: 1970-01-01T00:00:00Z }] } >> LocalTableWriter::SupportedTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] 
Test command err: 2024-11-21T23:05:55.667681Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |79.2%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> EvWrite::WriteWithSplit [GOOD] |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |79.3%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecordsCornerCases [GOOD] Test command err: 2024-11-21T23:04:45.712064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872424083251783:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:45.712575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bc3/r3tmp/tmpB9iHMg/pdisk_1.dat 2024-11-21T23:04:48.740590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:49.100695Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:49.126945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:49.127088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:49.393048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19997, node 1 2024-11-21T23:04:50.718827Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872424083251783:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:50.718910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:04:51.443027Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:51.443045Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:04:51.443051Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:51.443125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25889 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:04:54.033068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.041261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:54.041343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.059320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:04:54.059524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:04:54.059549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:04:54.063638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:54.067688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:04:54.067714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:04:54.075244Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.083619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230294123, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:54.083663Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:54.083979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:04:54.089916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:54.090085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:54.090139Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:04:54.090240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:04:54.090290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:04:54.090338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:04:54.093151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:04:54.093209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:04:54.093225Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:04:54.093352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:04:54.366380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.368422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:54.368721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.369025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:04:54.369328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:04:54.369341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:04:54.386430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:04:54.387063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:54.388327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:54.411043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:04:54.411119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:04:54.411135Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:04:54.411449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:25889 2024-11-21T23:04:58.880193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:58.880367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:58.880383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:58.881974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:04:58.882091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
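The schemeshard trace above keeps repeating one bookkeeping pattern: an operation records how many scheme-board publications it is still waiting for ("Publication still in progress, tx: ..., publications: N, subscribers: S"), every TEvUpdateAck carrying that txId as a cookie decrements the in-flight count ("Publication in-flight, count: 1" followed by "AckPublish ... version: V"), and once the count reaches zero the subscribers are notified and the entry is dropped ("Publication complete, notify & remove"). The stand-alone C++ sketch below models only that counter logic; the class and function names are invented for illustration and this is not YDB's actual TSchemeShard code.

// publication_tracker.cpp -- simplified model of the "publications in-flight /
// AckPublish / Publication complete" bookkeeping visible in the schemeshard log.
// PublicationTracker, StartPublishing and Ack are illustrative names, not YDB API.
#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <utility>
#include <vector>

struct PendingTx {
    uint32_t publicationsInFlight;       // "publications: N" in the log line
    std::vector<uint64_t> subscribers;   // parties to notify on completion
};

class PublicationTracker {
public:
    // "Publication still in progress, tx: <txId>, publications: N, subscribers: S"
    void StartPublishing(uint64_t txId, uint32_t publications,
                         std::vector<uint64_t> subscribers) {
        pending_[txId] = PendingTx{publications, std::move(subscribers)};
    }

    // "Handle TEvUpdateAck ... cookie: <txId>" -> "AckPublish ... version: V"
    void Ack(uint64_t txId, uint64_t pathId, uint64_t version) {
        auto it = pending_.find(txId);
        if (it == pending_.end()) {
            return;  // late or duplicate ack: nothing left to account for
        }
        std::cout << "AckPublish txId=" << txId << " pathId=" << pathId
                  << " version=" << version << "\n";
        if (--it->second.publicationsInFlight == 0) {
            // "Publication complete, notify & remove, ..., subscribers: S"
            for (uint64_t subscriber : it->second.subscribers) {
                std::cout << "notify subscriber " << subscriber << "\n";
            }
            pending_.erase(it);
        }
    }

private:
    std::unordered_map<uint64_t, PendingTx> pending_;
};

int main() {
    PublicationTracker tracker;
    // Mirrors the trace for txId 281474976710657: one publication, one subscriber,
    // acknowledged for pathId 1 at version 3.
    tracker.StartPublishing(281474976710657ULL, /*publications=*/1, {1001});
    tracker.Ack(281474976710657ULL, /*pathId=*/1, /*version=*/3);
    return 0;
}

Compiled and run, the sketch prints the ack followed by the subscriber notification, matching the order of the INFO and NOTICE lines for txId 281474976710657 above.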
2024-11-21T23:04:58.885364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230298932, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:58.885392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230298932, at schemeshard: 72057594046644480 2024-11-21T23:04:58.885567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:04:58.885641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:04:58.885670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:04:58.886922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:58.887054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:58.888015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:04:58.888039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:04:58.888059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:04:58.888103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T23:04:58.915504Z node 1 :FLAT_TX_SCHEM ... 
OK TabletId: 72075186224037889 Generation: 1 2024-11-21T23:05:53.924480Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037888 Generation: 1 2024-11-21T23:05:53.924538Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037892 Generation: 1 2024-11-21T23:05:53.928562Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 2024-11-21T23:05:53.928619Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1) EndOffset 2 readOffset 0 committedOffset 0 2024-11-21T23:05:53.928644Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3) EndOffset 0 readOffset 0 committedOffset 0 2024-11-21T23:05:53.929511Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) EndOffset 2 readOffset 0 committedOffset 0 2024-11-21T23:05:53.929555Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 INIT DONE TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4) EndOffset 8 readOffset 0 committedOffset 0 2024-11-21T23:05:53.929951Z :INFO: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] Confirm partition stream create. Partition stream id: 1. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 3. Read offset: (empty maybe) 2024-11-21T23:05:53.930082Z :INFO: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] Confirm partition stream create. Partition stream id: 2. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 4. Read offset: (empty maybe) 2024-11-21T23:05:53.930615Z :INFO: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] Confirm partition stream create. Partition stream id: 3. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 2. Read offset: (empty maybe) 2024-11-21T23:05:53.930637Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2), readOffset# 0, commitOffset# 0 2024-11-21T23:05:53.930888Z :INFO: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] Confirm partition stream create. Partition stream id: 4. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 0. 
Read offset: (empty maybe) 2024-11-21T23:05:53.930693Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T23:05:53.931322Z :INFO: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] Confirm partition stream create. Partition stream id: 5. Cluster: "". Topic: "stream_TestPutRecordsCornerCases". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:05:53.931228Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1), readOffset# 0, commitOffset# 0 2024-11-21T23:05:53.931278Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 4(assignId:1) EndOffset 2 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T23:05:53.940946Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3), readOffset# 0, commitOffset# 0 2024-11-21T23:05:53.941139Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3) EndOffset 0 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T23:05:53.947594Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5), readOffset# 0, commitOffset# 0 2024-11-21T23:05:53.947689Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 0(assignId:5) EndOffset 2 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T23:05:53.948210Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4), readOffset# 0, commitOffset# 0 2024-11-21T23:05:53.948281Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4) EndOffset 8 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2024-11-21T23:05:54.014173Z :DEBUG: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:05:54.014576Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2024-11-21T23:05:54.014588Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2024-11-21T23:05:54.014916Z :DEBUG: [/Root/] Take Data. Partition 3. 
Read: {0, 0} (0-0) 2024-11-21T23:05:54.014972Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2024-11-21T23:05:54.014993Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2024-11-21T23:05:54.015013Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {3, 0} (3-3) 2024-11-21T23:05:54.015060Z :DEBUG: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2024-11-21T23:05:54.283633Z :DEBUG: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:05:54.285183Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2024-11-21T23:05:54.285259Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2024-11-21T23:05:54.285297Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-0) 2024-11-21T23:05:54.285335Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:05:54.285361Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T23:05:54.285460Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2024-11-21T23:05:54.285489Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2024-11-21T23:05:54.285521Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2024-11-21T23:05:54.285561Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2024-11-21T23:05:54.285594Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2024-11-21T23:05:54.285658Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T23:05:54.285691Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T23:05:54.285729Z :DEBUG: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2024-11-21T23:05:54.320302Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2024-11-21T23:05:54.320351Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2024-11-21T23:05:54.320394Z :DEBUG: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2024-11-21T23:05:54.320545Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2024-11-21T23:05:54.320567Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2024-11-21T23:05:54.320588Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2024-11-21T23:05:54.320621Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2024-11-21T23:05:54.320645Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2024-11-21T23:05:54.320666Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2024-11-21T23:05:54.320684Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2024-11-21T23:05:54.320707Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2024-11-21T23:05:54.320741Z :DEBUG: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] The application data is transferred to the client. Number of messages 8, size 8388608 bytes 2024-11-21T23:05:54.323259Z :INFO: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] Closing read session. 
Close timeout: 0.000000s 2024-11-21T23:05:54.323340Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:4:2:1:0 null:stream_TestPutRecordsCornerCases:3:1:3:0 null:stream_TestPutRecordsCornerCases:2:3:0:0 null:stream_TestPutRecordsCornerCases:1:5:7:0 null:stream_TestPutRecordsCornerCases:0:4:1:0 2024-11-21T23:05:54.323384Z :INFO: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] Counters: { Errors: 0 CurrentSessionLifetimeMs: 476 BytesRead: 9437696 MessagesRead: 16 BytesReadCompressed: 9437696 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:05:54.323517Z :NOTICE: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:05:54.323567Z :DEBUG: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] [null] Abort session to cluster 2024-11-21T23:05:54.324181Z :NOTICE: [/Root/] [/Root/] [4927d524-1daa727a-e4c2b95d-21218ab] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:05:54.326598Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 grpc read failed 2024-11-21T23:05:54.326648Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 grpc closed 2024-11-21T23:05:54.326695Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_5410348033367538606_v1 is DEAD >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::Drop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2024-11-21T23:05:45.014067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872681396574876:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:05:45.016091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001102/r3tmp/tmppCIQNp/pdisk_1.dat 2024-11-21T23:05:47.070063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:47.175516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:47.175640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:47.183840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:47.860823Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:48.506880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:50.022352Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872681396574876:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:05:50.022851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:30349 TServer::EnableGrpc on GrpcPort 63469, node 1 2024-11-21T23:05:50.182384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:05:50.182432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:05:50.182446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:05:50.182543Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30349 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:05:53.271601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:05:53.291272Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:05:53.332171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732230353581 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "uint32_value" Type: "... 
(TRUNCATED) 2024-11-21T23:05:53.649895Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872715756313945:2364] Handshake: worker# [1:7439872715756313852:2303] 2024-11-21T23:05:53.650137Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872715756313945:2364] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:05:53.650437Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872715756313945:2364] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T23:05:53.653777Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872715756313945:2364] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Offset: 1 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 2 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 3 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 4 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 5 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 6 Data: 41b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 7 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 8 Data: 44b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 9 Data: 66b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 10 Data: 71b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 11 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 12 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 13 Data: 48b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 14 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 15 Data: 58b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 16 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 17 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 18 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 19 Data: 76b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 20 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 21 Data: 54b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 22 Data: 61b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 23 Data: 51b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 24 Data: 45b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 25 Data: 46b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 26 Data: 47b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 27 Data: 50b 
CreateTime: 1970-01-01T00:00:00Z },{ Offset: 28 Data: 49b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 29 Data: 72b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 30 Data: 57b CreateTime: 1970-01-01T00:00:00Z },{ Offset: 31 Data: 64b CreateTime: 1970-01-01T00:00:00Z }] } 2024-11-21T23:05:53.656049Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872715756313945:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2024-11-21T23:05:53.656572Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872715756313948:2364] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T23:05:53.656621Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872715756313945:2364] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:05:53.656910Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872715756313948:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 
Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2024-11-21T23:05:53.700269Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7439872715756313948:2364] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T23:05:53.700349Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872715756313945:2364] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:05:53.700455Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7439872715756313945:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |79.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2024-11-21T23:05:53.626475Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:05:53.713360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:05:53.735632Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:05:53.735716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:05:53.735955Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:05:53.744210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:05:53.744443Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:53.744689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:53.744800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:53.744904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:53.745041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:53.745157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:53.745269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:53.745370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:53.745496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.745615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:53.745745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:53.769397Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:53.775326Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:05:53.775648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:05:53.775703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:05:53.775869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:53.776066Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:05:53.776146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:05:53.776201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:05:53.776307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:05:53.776373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:05:53.776426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:05:53.776467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:05:53.776664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:53.776737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:05:53.776778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:05:53.776820Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:05:53.776915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:05:53.776974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:05:53.777020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:05:53.777049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:05:53.777118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:05:53.777158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:05:53.777185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T23:05:53.777236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:05:53.777272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:05:53.777301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:05:53.777518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2024-11-21T23:05:53.777615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2024-11-21T23:05:53.777742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2024-11-21T23:05:53.777842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2024-11-21T23:05:53.777991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:05:53.778045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:05:53.778079Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:05:53.778283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:05:53.778333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.778367Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.778740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:05:53.778791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:05:53.778824Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:05:53.779039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:05:53.779089Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:05:53.779124Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:05:53.779248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... nId: 0 gen: 0 tablet: 9437184 bytes: 8400896 rows: 1024 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2024-11-21T23:05:56.519464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.519590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.519634Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T23:05:56.519673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:05:56.524768Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:05:56.524927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.524978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T23:05:56.525095Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:219;stage=no data is ready yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 
2024-11-21T23:05:56.525312Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T23:05:56.525356Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T23:05:56.525400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=1; 2024-11-21T23:05:56.525448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=1024;merger=0;interval_id=1; 2024-11-21T23:05:56.525498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T23:05:56.525627Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.525679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=1024;finished=1; 2024-11-21T23:05:56.525792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=1024; 2024-11-21T23:05:56.525857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=8400896;num_rows=1024;batch_columns=key,field; 2024-11-21T23:05:56.526055Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] send ScanData to [1:264:2279] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 8400896 rows: 1024 page faults: 0 finished: 0 pageFault: 0 arrow schema: key: uint64 field: string 2024-11-21T23:05:56.526209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.526306Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.531423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.534328Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:05:56.538990Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.539145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2024-11-21T23:05:56.539201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] finished for tablet 9437184 2024-11-21T23:05:56.539298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:272:2287] send ScanData to [1:264:2279] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:05:56.539761Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:272:2287] and sent to [1:264:2279] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0.019},{"events":["f_ProduceResults"],"t":0.021},{"events":["l_bootstrap"],"t":0.022},{"events":["f_processing","f_task_result"],"t":0.024},{"events":["f_ack"],"t":0.17},{"events":["l_task_result"],"t":0.217},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.225}],"full":{"a":1732230356313941,"name":"_full_task","f":1732230356313941,"d_finished":0,"c":0,"l":1732230356539354,"d":225413},"events":[{"name":"bootstrap","f":1732230356333034,"d_finished":3554,"c":1,"l":1732230356336588,"d":3554},{"a":1732230356534293,"name":"ack","f":1732230356484937,"d_finished":35158,"c":2,"l":1732230356525129,"d":40219},{"a":1732230356534266,"name":"processing","f":1732230356338518,"d_finished":102308,"c":11,"l":1732230356531549,"d":107396},{"name":"ProduceResults","f":1732230356335260,"d_finished":47366,"c":15,"l":1732230356539173,"d":47366},{"a":1732230356539178,"name":"Finish","f":1732230356539178,"d_finished":0,"c":0,"l":1732230356539354,"d":176},{"name":"task_result","f":1732230356338541,"d_finished":66934,"c":9,"l":1732230356531543,"d":66934}],"id":"9437184::1"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;) 2024-11-21T23:05:56.539866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:05:56.312995Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14766240;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14766240;selected_rows=0; 2024-11-21T23:05:56.539944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:05:56.540068Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.087157s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.085237s;size=0.014766232;details={columns=1,2,4294967040,4294967041,4294967042;};};]};; 2024-11-21T23:05:56.540164Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:272:2287];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> DataStreams::TestShardPagination [GOOD] >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> TDatabaseResolverTests::MySQL >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation |79.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::DropTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2024-11-21T23:04:41.298985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872409892607718:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:41.299106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c15/r3tmp/tmpkTINRF/pdisk_1.dat 2024-11-21T23:04:42.896746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:44.466939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:44.915454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:44.915751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:44.940941Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:45.025621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:45.756354Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.139732s 2024-11-21T23:04:45.756718Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.158412s TServer::EnableGrpc on GrpcPort 6767, node 1 2024-11-21T23:04:46.447672Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872409892607718:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:46.494804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:04:48.087549Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:48.087566Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:04:48.088059Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:48.088663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17926 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:04:49.110321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:49.116561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:49.116643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:49.122497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:04:49.122716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:04:49.122751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:04:49.127355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:04:49.127383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T23:04:49.129585Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:49.131529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:49.135613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230289181, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:49.135651Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:49.142307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:04:49.161464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:49.163234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:49.163432Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:04:49.163972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:04:49.164323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:04:49.164377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:04:49.178018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:04:49.178172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:04:49.178312Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:04:49.178571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:04:49.947901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:49.948163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:49.948181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:49.948245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:04:49.948322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:04:49.948334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:04:49.957259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: 
+W:user@builtin 2024-11-21T23:04:49.957583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:49.982031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:49.992808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:04:49.992867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:04:49.992882Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:04:49.998755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:17926 2024-11-21T23:04:53.095360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.095541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:53.095559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.106153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:04:53.107156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:04:53.134700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230293178, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:53.134734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230293178, at schemeshard: 72057594046644480 2024-11-21T23:04:53.134926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:04:53.135018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:04:53.135043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:04:53.143560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:04:53.144347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:53.144453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:53.152841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: ... 
46644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:10091 2024-11-21T23:05:52.627057Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:05:52.627264Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:05:52.627299Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:05:52.631407Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:05:52.631582Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:05:52.635905Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:05:52.637274Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230352678, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:05:52.637315Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230352678, at schemeshard: 72057594046644480 2024-11-21T23:05:52.637592Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:05:52.637667Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:05:52.637704Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T23:05:52.639204Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:05:52.639387Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:05:52.640642Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:05:52.640686Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:05:52.640706Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:05:52.640765Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T23:05:52.708352Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestShardPagination, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:05:52.708870Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:05:52.713482Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: 
/Root/stream_TestShardPagination 2024-11-21T23:05:52.713713Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:05:52.713990Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:05:52.714053Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T23:05:52.716213Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:05:52.716267Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:05:52.716284Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:05:52.716489Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:05:52.716514Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:05:52.716525Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T23:05:52.720195Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T23:05:52.724842Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.725105Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.725224Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.725325Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.725469Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.725612Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.725751Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.725886Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.726006Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.726147Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:05:52.726181Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 1 -> 3 
2024-11-21T23:05:52.732205Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:05:52.812572Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.823650Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.823909Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.824172Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.824349Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.827060Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.828173Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.830151Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.836009Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.866793Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:05:52.866963Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 3 -> 128 2024-11-21T23:05:52.869789Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:05:52.874606Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230352916, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:05:52.874654Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976710660:0 HandleReply TEvOperationPlan, step: 1732230352916, at tablet: 72057594046644480 2024-11-21T23:05:52.874859Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 128 -> 240 2024-11-21T23:05:52.877784Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:05:52.878204Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:05:52.878273Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:0 ProgressState 2024-11-21T23:05:52.878372Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:0 progress is 1/1 2024-11-21T23:05:52.878556Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2024-11-21T23:05:52.878984Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710660, publications: 2, subscribers: 1 
2024-11-21T23:05:52.880562Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:05:52.880617Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:05:52.880637Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T23:05:52.880856Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:05:52.880890Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:05:52.880907Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:05:52.880958Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710660, subscribers: 1 >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied >> TBsVDiskOutOfSpace::WriteUntilYellowZone [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::Greenplum_MasterNode >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2024-11-21T23:06:02.198611Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. 
>> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2024-11-21T23:06:02.634819Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2024-11-21T23:06:02.862722Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupAbsentDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:05:52.824759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:05:52.824846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:52.824894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:05:52.824947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:05:52.824987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:05:52.825013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:05:52.825072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:52.825420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:05:52.897285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:05:52.897348Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2024-11-21T23:05:52.907552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:05:52.911962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:05:52.912144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:05:52.918855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:05:52.919441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:05:52.920162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:52.920429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:05:52.926241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:52.927605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:52.927669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:52.927877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:05:52.927924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:52.927970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:05:52.928071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:05:52.936133Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:05:53.042272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:05:53.042678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.042899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:05:53.043118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:05:53.043180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.045586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:53.045725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:05:53.046023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.046097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:05:53.046149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:05:53.046182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:05:53.048091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.048147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:05:53.048185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:05:53.049927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.049971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.050013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:53.050054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:05:53.053738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:05:53.055571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:05:53.055768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:05:53.056778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:53.056894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:05:53.056936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:53.057248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:05:53.057303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:53.057470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:05:53.057565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:05:53.060543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:53.060591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:53.060744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:53.060780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:05:53.061172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.061223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:05:53.061314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:05:53.061347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:53.061404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:05:53.061454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:53.061488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:05:53.061514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:05:53.061576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:05:53.061612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:05:53.061642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:05:53.063355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:53.063465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:53.063512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:05:53.063557Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:05:53.063596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:05:53.063684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
:202:2205], at schemeshard: 72057594046678944, txId: 103, path id: 4 2024-11-21T23:06:01.886060Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:06:01.886095Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T23:06:01.887387Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:01.887416Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T23:06:01.887747Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:06:01.888104Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T23:06:01.889378Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:06:01.889730Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:06:01.890061Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:06:01.898830Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:06:01.899145Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T23:06:01.899507Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2024-11-21T23:06:01.899824Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 4], 1 2024-11-21T23:06:01.901476Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:202:2205], Recipient [6:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 5 } 2024-11-21T23:06:01.901508Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T23:06:01.901568Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:06:01.901624Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:06:01.901647Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:06:01.901687Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T23:06:01.901723Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:06:01.901804Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:01.902414Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:202:2205], Recipient [6:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Version: 1 } 
2024-11-21T23:06:01.902446Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T23:06:01.902490Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:06:01.902540Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:06:01.902561Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:06:01.902582Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 1 2024-11-21T23:06:01.902605Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:06:01.902676Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T23:06:01.902713Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:01.922930Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:01.923717Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:06:01.923747Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:01.927087Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:06:01.927119Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:06:01.927316Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:06:01.927896Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:06:01.929419Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:351:2343], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:06:01.930329Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:06:01.930670Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:06:01.931070Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:296:2288], Recipient [6:122:2148]: NKikimrScheme.TEvNotifyTxCompletion TxId: 103 2024-11-21T23:06:01.931401Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T23:06:01.931754Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:06:01.932418Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:06:01.932774Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [6:349:2341] 2024-11-21T23:06:01.933490Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:351:2343], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:01.933841Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:01.934283Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2024-11-21T23:06:01.936563Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:352:2344], Recipient [6:122:2148]: {TEvModifySchemeTransaction txid# 104 TabletId# 72057594046678944} 2024-11-21T23:06:01.936939Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:06:01.946587Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupBackupCollection BackupBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:06:01.952064Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 104:0, explain: Check failed: path: '/MyRoot/Table1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2024-11-21T23:06:01.952750Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Table1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2024-11-21T23:06:01.953490Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:01.963984Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:06:01.965175Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/Table1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: BACKUP, path: /MyRoot/.backups/collections/MyCollection1 2024-11-21T23:06:01.965755Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T23:06:01.967387Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T23:06:01.967440Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T23:06:01.967803Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:358:2350], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:06:01.967855Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2024-11-21T23:06:01.967891Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:06:01.968007Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:296:2288], Recipient [6:122:2148]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2024-11-21T23:06:01.968046Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T23:06:01.968110Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:06:01.968230Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:06:01.968270Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [6:356:2348] 2024-11-21T23:06:01.968425Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:358:2350], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:01.968457Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:01.968492Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2024-11-21T23:06:02.728356Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2024-11-21T23:06:03.132687Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |79.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction >> TDatabaseResolverTests::ClickHouseNative ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] Test command err: None domains 1 new (ns): 213.0836124 None domains 1 old (ns): 231.1983369 None domains 9 new (ns): 293.17921 None domains 9 old (ns): 131.4712102 Mirror3 domains 4 new (ns): 260.9315704 Mirror3 domains 4 old (ns): 147.0383532 Mirror3 domains 9 new (ns): 198.4263535 Mirror3 domains 9 old (ns): 170.5762339 4Plus2Block domains 8 new (ns): 175.1709112 4Plus2Block domains 8 old (ns): 75.03786069 4Plus2Block domains 9 new (ns): 140.9012173 4Plus2Block domains 9 old (ns): 66.56102719 ErasureMirror3of4 domains 8 new (ns): 195.918266 ErasureMirror3of4 domains 8 old (ns): 90.84075244 ErasureMirror3of4 domains 9 new (ns): 135.0868604 ErasureMirror3of4 domains 9 old (ns): 73.83883987 >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |79.3%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:05:53.018266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:05:53.018591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:53.018799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:05:53.018833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:05:53.018974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:05:53.019080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:05:53.019254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:53.021621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T23:05:53.306786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:05:53.306841Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:53.345643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:05:53.345976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:05:53.346163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:05:53.362370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:05:53.364281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:05:53.365051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:53.365333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:05:53.369720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:53.371045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:53.371103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:53.371326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:05:53.371375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:53.371415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:05:53.371495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.380343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:05:53.580983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:05:53.581200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.581398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:05:53.581574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:05:53.581629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.587690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:53.587812Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:05:53.588041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.588107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:05:53.588138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:05:53.588166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:05:53.590377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.590443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:05:53.590473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:05:53.592028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.592061Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.592104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:53.592148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:05:53.595356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:05:53.597011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:05:53.597166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:05:53.597965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:53.598091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:05:53.598140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:53.598408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:05:53.598451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:53.598592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 1 2024-11-21T23:05:53.598694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:05:53.605479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:53.605539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:53.605723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:53.605759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:05:53.606067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:53.606109Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:05:53.606197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:05:53.606226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:53.606269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:05:53.606316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:53.606347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:05:53.606419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:05:53.606502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:05:53.606560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:05:53.606601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:05:53.611750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:53.612652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:53.612780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:05:53.613080Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:05:53.613229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:05:53.613754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
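The trace above walks a single schemeshard operation (txId 1, ALTER DATABASE) through its numbered states, 2 -> 3 -> 128 -> 240, before the scheme-board publication is acknowledged. Below is a minimal log-scraping sketch for pulling those transitions out of a captured run; it assumes only the "Change state for txid" wording visible above, and the helper itself is hypothetical, not part of ya or YDB.

```python
import re
from collections import defaultdict

# Hypothetical log-scraping helper (not part of ya or YDB): reconstruct each
# operation's state path from schemeshard lines of the form
#   "Change state for txid 1:0 2 -> 3"
STATE_RE = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

def state_paths(log_text: str) -> dict:
    paths = defaultdict(list)
    for op_id, src, dst in STATE_RE.findall(log_text):
        if not paths[op_id]:
            paths[op_id].append(int(src))
        paths[op_id].append(int(dst))
    return dict(paths)

# Transitions taken from the trace above; prints {'1:0': [2, 3, 128, 240]}
sample = ("... Change state for txid 1:0 2 -> 3 ...\n"
          "... Change state for txid 1:0 3 -> 128 ...\n"
          "... Change state for txid 1:0 128 -> 240 ...\n")
print(state_paths(sample))
```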
hDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T23:06:05.778613Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:05.778960Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [6:572:2533], Recipient [6:122:2148]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1555 } } 2024-11-21T23:06:05.778997Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T23:06:05.779082Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1555 } } 2024-11-21T23:06:05.779116Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2024-11-21T23:06:05.779226Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1555 } } 2024-11-21T23:06:05.779326Z node 6 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 1555 } } 2024-11-21T23:06:05.779365Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:05.779724Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:202:2205], Recipient [6:122:2148]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2024-11-21T23:06:05.779760Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T23:06:05.779808Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T23:06:05.779865Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T23:06:05.779889Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2024-11-21T23:06:05.779944Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2024-11-21T23:06:05.779975Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2024-11-21T23:06:05.780055Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2024-11-21T23:06:05.780093Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:05.780327Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:628:2581], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:06:05.780358Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:06:05.780384Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:06:05.780582Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [6:572:2533], Recipient [6:122:2148]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 572 RawX2: 25769806309 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T23:06:05.780619Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2024-11-21T23:06:05.780710Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 572 RawX2: 25769806309 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T23:06:05.780749Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2024-11-21T23:06:05.780916Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 572 RawX2: 25769806309 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T23:06:05.780980Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:06:05.781074Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 572 RawX2: 25769806309 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2024-11-21T23:06:05.781145Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:06:05.781191Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2024-11-21T23:06:05.781240Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T23:06:05.781287Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:1 129 -> 240 2024-11-21T23:06:05.781450Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:05.804284Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:05.804461Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:05.823393Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 106 2024-11-21T23:06:05.823459Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:05.824197Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T23:06:05.824215Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:05.824572Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2024-11-21T23:06:05.824595Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:05.824664Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T23:06:05.824681Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:05.825043Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2024-11-21T23:06:05.825469Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:05.825893Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 106:1 2024-11-21T23:06:05.840961Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:572:2533] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2024-11-21T23:06:05.856063Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:122:2148], Recipient [6:122:2148]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T23:06:05.856113Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T23:06:05.856407Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2024-11-21T23:06:05.856760Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:1 ProgressState 2024-11-21T23:06:05.874049Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:05.874141Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2024-11-21T23:06:05.874187Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2024-11-21T23:06:05.874244Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true 2024-11-21T23:06:05.874317Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:296:2288] message: TxId: 106 2024-11-21T23:06:05.874361Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2024-11-21T23:06:05.874418Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T23:06:05.874697Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T23:06:05.874776Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T23:06:05.874809Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:1 2024-11-21T23:06:05.874826Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:1 2024-11-21T23:06:05.874888Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2024-11-21T23:06:05.880704Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:05.881051Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:296:2288] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944 2024-11-21T23:06:05.881943Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T23:06:05.882224Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:598:2552] 2024-11-21T23:06:05.886078Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:600:2554], Recipient [6:122:2148]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:05.886116Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:05.886139Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 |79.4%| [TA] $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> TDatabaseResolverTests::Ydb_Serverless >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> DataStreams::TestInvalidRetentionCombinations [GOOD] |79.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2024-11-21T23:05:53.270173Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:05:53.388688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:05:53.412322Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:05:53.412399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:05:53.412557Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:05:53.419204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:05:53.419464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:53.419682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:53.419804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:53.419924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:53.420133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:53.420335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:53.420480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:53.420611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:53.420756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.420867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:53.420980Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:53.444966Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:53.449298Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:05:53.449490Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:05:53.449554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:05:53.449718Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:53.449885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:05:53.449956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:05:53.449994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:05:53.450069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:05:53.450133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:05:53.450189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:05:53.450220Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:05:53.450430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:53.450495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:05:53.450622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:05:53.450675Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:05:53.450819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:05:53.450899Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:05:53.450940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:05:53.450967Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:05:53.451107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:05:53.451166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:05:53.451199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:05:53.451245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:05:53.451288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:05:53.451311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:05:53.451456Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2024-11-21T23:05:53.451551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2024-11-21T23:05:53.451638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2024-11-21T23:05:53.451739Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2024-11-21T23:05:53.451922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:05:53.452040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:05:53.452075Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:05:53.452271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:05:53.452316Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.452347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.452482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:05:53.452540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:05:53.452572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:05:53.452778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:05:53.452818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:05:53.452853Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:05:53.452986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... nt=31;merger=0;interval_id=25; 2024-11-21T23:06:06.944359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T23:06:06.944490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T23:06:06.944528Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=31;finished=1; 2024-11-21T23:06:06.944565Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:06:06.944875Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:06:06.945151Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T23:06:06.945205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:06:06.945375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2024-11-21T23:06:06.945480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2024-11-21T23:06:06.945633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] send ScanData to [1:427:2442] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 2759 rows: 31 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary 2024-11-21T23:06:06.945804Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T23:06:06.945960Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T23:06:06.946119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T23:06:06.946374Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:06:06.946554Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T23:06:06.946706Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2024-11-21T23:06:06.946761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] finished for tablet 9437184 2024-11-21T23:06:06.946853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:428:2443] send ScanData to [1:427:2442] txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:06:06.947532Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:428:2443] and sent to [1:427:2442] packs: 0 txId: 103 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 
page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0.005},{"events":["f_ProduceResults"],"t":0.016},{"events":["l_bootstrap"],"t":0.018},{"events":["f_processing","f_task_result"],"t":0.019},{"events":["f_ack","l_task_result"],"t":0.03},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.032}],"full":{"a":1732230366914082,"name":"_full_task","f":1732230366914082,"d_finished":0,"c":0,"l":1732230366946902,"d":32820},"events":[{"name":"bootstrap","f":1732230366920068,"d_finished":12027,"c":1,"l":1732230366932095,"d":12027},{"a":1732230366946343,"name":"ack","f":1732230366944847,"d_finished":1307,"c":1,"l":1732230366946154,"d":1866},{"a":1732230366946330,"name":"processing","f":1732230366933858,"d_finished":7555,"c":10,"l":1732230366946156,"d":8127},{"name":"ProduceResults","f":1732230366930188,"d_finished":3952,"c":13,"l":1732230366946745,"d":3952},{"a":1732230366946748,"name":"Finish","f":1732230366946748,"d_finished":0,"c":0,"l":1732230366946902,"d":154},{"name":"task_result","f":1732230366933879,"d_finished":6092,"c":9,"l":1732230366944630,"d":6092}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;) 2024-11-21T23:06:06.947620Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:06:06.912508Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2024-11-21T23:06:06.947665Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:06:06.947750Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:72;profile=;; 2024-11-21T23:06:06.947890Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:428:2443];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2024-11-21T23:06:08.813024Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with 
id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> TBlobStorageProxyTest::TestNormal ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2024-11-21T23:04:45.098438Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872423313618362:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:45.098619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c0e/r3tmp/tmpSoroby/pdisk_1.dat 2024-11-21T23:04:48.196928Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:48.861448Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:48.865013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:48.865125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:48.888122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19974, node 1 2024-11-21T23:04:49.853675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:49.853952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:04:49.853957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:49.854241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:04:50.103659Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872423313618362:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:50.103735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:2656 WaitRootIsUp 'Root'... 
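The Ydb_Serverless_Timeout case above passes precisely because the resolver is expected to fail: the log records an HTTP lookup against endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh', ending in "Connection timeout". The sketch below only illustrates what such a probe-and-timeout looks like from the outside, using plain sockets; it is an assumed shape, not the resolver's actual code.

```python
import socket

# Assumed shape only, not the resolver's actual code: the failing lookup above
# is an HTTP GET against ydbc.ydb.cloud.yandex.net:8789
# /ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh, and the test
# expects the error to surface as a connection timeout.
def probe(host: str, port: int, timeout_s: float = 1.0) -> str:
    try:
        with socket.create_connection((host, port), timeout=timeout_s):
            return "connected"
    except socket.timeout:
        return "Connection timeout"  # the wording reported in the log above
    except OSError as err:
        return f"connect failed: {err}"

print(probe("ydbc.ydb.cloud.yandex.net", 8789))
```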
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:04:53.170259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.194290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:53.194357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.200027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:04:53.200168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:04:53.200187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
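A side note on the numbers in the plan step above: stepId 1732230293297 lines up with Unix-epoch milliseconds and lands in the same second as the surrounding 2024-11-21T23:04:53.2xx log entries. That is an observation about this particular run, not a documented YDB invariant; a quick conversion sketch:

```python
from datetime import datetime, timezone

# Observation about this run (not a documented invariant): planned step ids
# look like Unix-epoch milliseconds, e.g. stepId 1732230293297 falls in the
# same second as the surrounding 2024-11-21T23:04:53.2xx log entries.
def step_to_utc(step_id_ms: int) -> datetime:
    return datetime.fromtimestamp(step_id_ms / 1000, tz=timezone.utc)

print(step_to_utc(1732230293297).isoformat())  # 2024-11-21T23:04:53.297000+00:00
```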
2024-11-21T23:04:53.214314Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:04:53.214343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:04:53.222343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:53.228325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.275772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230293297, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:53.276092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:53.277399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:04:53.300617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:53.301790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:53.301840Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:04:53.302700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:04:53.305420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:04:53.305486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:04:53.334028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:04:53.341802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:04:53.341837Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:04:53.344138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:04:53.751824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.752641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:53.752657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.752824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:04:53.753107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:04:53.753117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:04:53.768388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:04:53.768551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:53.768768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:53.769723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:04:53.769755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:04:53.769767Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:04:53.769831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:2656 2024-11-21T23:04:53.987550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.987770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:53.987817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.991344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:04:53.991535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.997812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230294039, transactions count in step: 1, at schemeshard: 72057594046644480 waiting... 
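The AUDIT records above (MODIFY ACL with two "add access" entries, then ALTER USER ATTRIBUTES) are flat "key: value" lists. Below is a small hypothetical helper, again not part of the ya tooling, for turning one of those lines into a dict while keeping repeated keys such as "add access" as a list.

```python
# Hypothetical helper (not part of ya or YDB): split a schemeshard AUDIT line
# into key/value pairs, keeping repeated keys such as "add access" as a list.
def parse_audit(line: str) -> dict:
    payload = line.split("AUDIT:", 1)[1]
    record: dict = {}
    for part in payload.split(", "):
        key, _, value = part.partition(": ")
        record.setdefault(key.strip(), []).append(value.strip())
    return {k: v[0] if len(v) == 1 else v for k, v in record.items()}

sample = ("AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, "
          "operation: MODIFY ACL, path: Root, "
          "add access: +R:user@builtin, add access: +W:user@builtin")
print(parse_audit(sample))
```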
2024-11-21T23:04:53.997842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230294039, at schemeshard: 72057594046644480 2024-11-21T23:04:53.998587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:04:53.998715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:04:53.998781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:04:54.006354Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:04:54.012324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:54.012576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:54.014813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:04:54.015058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:04:54.015075Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:04:54.015137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE ... .891412Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 1 -> 3 2024-11-21T23:06:02.904092Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:06:03.017738Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.050565Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.050894Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.079349Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.098907Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.116760Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.116800Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 3 -> 128 2024-11-21T23:06:03.141706Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:06:03.163331Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230363199, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:06:03.163396Z node 10 
:FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715660:0 HandleReply TEvOperationPlan, step: 1732230363199, at tablet: 72057594046644480 2024-11-21T23:06:03.163685Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 128 -> 240 2024-11-21T23:06:03.177264Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:03.179252Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:03.179353Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2024-11-21T23:06:03.179476Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2024-11-21T23:06:03.179545Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2024-11-21T23:06:03.179762Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715660, publications: 2, subscribers: 1 2024-11-21T23:06:03.188493Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T23:06:03.188542Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T23:06:03.188568Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T23:06:03.189947Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715660 2024-11-21T23:06:03.197375Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715660 2024-11-21T23:06:03.197413Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:06:03.197494Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715660, subscribers: 1 2024-11-21T23:06:03.328248Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestInvalidRetentionCombinationsa, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:06:03.328669Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:03.362996Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715661, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestInvalidRetentionCombinationsa 2024-11-21T23:06:03.364504Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:03.365267Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:03.366580Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T23:06:03.375508Z node 10 
:FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T23:06:03.376825Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T23:06:03.376860Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T23:06:03.376875Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2024-11-21T23:06:03.378301Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T23:06:03.378324Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T23:06:03.378334Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2024-11-21T23:06:03.411026Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:03.411988Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:03.413083Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:03.414234Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:03.417205Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:03.432816Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715661:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:03.432902Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 1 -> 3 2024-11-21T23:06:03.445207Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:06:03.733655Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.787145Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.793027Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.798573Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.798886Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.799010Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
NPQState::TConfigureParts operationId#281474976715661:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:03.799033Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 3 -> 128 2024-11-21T23:06:03.805577Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:06:03.812270Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230363857, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:06:03.812343Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976715661:0 HandleReply TEvOperationPlan, step: 1732230363857, at tablet: 72057594046644480 2024-11-21T23:06:03.812590Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715661:0 128 -> 240 2024-11-21T23:06:03.814772Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:03.815240Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:03.815308Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715661:0 ProgressState 2024-11-21T23:06:03.815423Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2024-11-21T23:06:03.815481Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2024-11-21T23:06:03.815720Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 2, subscribers: 1 2024-11-21T23:06:03.821610Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T23:06:03.821670Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T23:06:03.821695Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 10 2024-11-21T23:06:03.821956Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2024-11-21T23:06:03.822005Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2024-11-21T23:06:03.822021Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T23:06:03.822087Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 |79.4%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] Test command err: 2024-11-21T23:03:35.521213Z :BS_VDISK_GET CRIT: VDISK[0:_:0:0:0]: TEvVGetResult: Result message is too large; size# 67108001 orig# {ExtrQuery# [5000:1:0:0:0:100000:1] sh# 257 sz# 99743 c# 0}{ExtrQuery# [5000:1:1:0:0:100000:1] sh# 257 sz# 99743 c# 1}{ExtrQuery# [5000:1:2:0:0:100000:1] sh# 257 sz# 99743 c# 2}{ExtrQuery# [5000:1:3:0:0:100000:1] sh# 257 sz# 99743 c# 3}{ExtrQuery# [5000:1:4:0:0:100000:1] sh# 257 sz# 99743 c# 4}{ExtrQuery# [5000:1:5:0:0:100000:1] sh# 257 sz# 99743 c# 5}{ExtrQuery# [5000:1:6:0:0:100000:1] sh# 257 sz# 99743 c# 6}{ExtrQuery# [5000:1:7:0:0:100000:1] sh# 257 sz# 99743 c# 7}{ExtrQuery# [5000:1:8:0:0:100000:1] sh# 257 sz# 99743 c# 8}{ExtrQuery# [5000:1:9:0:0:100000:1] sh# 257 sz# 99743 c# 9}{ExtrQuery# [5000:1:10:0:0:100000:1] sh# 257 sz# 99743 c# 10}{ExtrQuery# [5000:1:11:0:0:100000:1] sh# 257 sz# 99743 c# 11}{ExtrQuery# [5000:1:12:0:0:100000:1] sh# 257 sz# 99743 c# 12}{ExtrQuery# [5000:1:13:0:0:100000:1] sh# 257 sz# 99743 c# 13}{ExtrQuery# [5000:1:14:0:0:100000:1] sh# 257 sz# 99743 c# 14}{ExtrQuery# [5000:1:15:0:0:100000:1] sh# 257 sz# 99743 c# 15}{ExtrQuery# [5000:1:16:0:0:100000:1] sh# 257 sz# 99743 c# 16}{ExtrQuery# [5000:1:17:0:0:100000:1] sh# 257 sz# 99743 c# 17}{ExtrQuery# [5000:1:18:0:0:100000:1] sh# 257 sz# 99743 c# 18}{ExtrQuery# [5000:1:19:0:0:100000:1] sh# 257 sz# 99743 c# 19}{ExtrQuery# [5000:1:20:0:0:100000:1] sh# 257 sz# 99743 c# 20}{ExtrQuery# [5000:1:21:0:0:100000:1] sh# 257 sz# 99743 c# 21}{ExtrQuery# [5000:1:22:0:0:100000:1] sh# 257 sz# 99743 c# 22}{ExtrQuery# [5000:1:23:0:0:100000:1] sh# 257 sz# 99743 c# 23}{ExtrQuery# [5000:1:24:0:0:100000:1] sh# 257 sz# 99743 c# 24}{ExtrQuery# [5000:1:25:0:0:100000:1] sh# 257 sz# 99743 c# 25}{ExtrQuery# [5000:1:26:0:0:100000:1] sh# 257 sz# 99743 c# 26}{ExtrQuery# [5000:1:27:0:0:100000:1] sh# 257 sz# 99743 c# 27}{ExtrQuery# [5000:1:28:0:0:100000:1] sh# 257 sz# 99743 c# 28}{ExtrQuery# [5000:1:29:0:0:100000:1] sh# 257 sz# 99743 c# 29}{ExtrQuery# [5000:1:30:0:0:100000:1] sh# 257 sz# 99743 c# 30}{ExtrQuery# [5000:1:31:0:0:100000:1] sh# 257 sz# 99743 c# 31}{ExtrQuery# [5000:1:32:0:0:100000:1] sh# 257 sz# 99743 c# 32}{ExtrQuery# [5000:1:33:0:0:100000:1] sh# 257 sz# 99743 c# 33}{ExtrQuery# [5000:1:34:0:0:100000:1] sh# 257 sz# 99743 c# 34}{ExtrQuery# [5000:1:35:0:0:100000:1] sh# 257 sz# 99743 c# 35}{ExtrQuery# [5000:1:36:0:0:100000:1] sh# 257 sz# 99743 c# 36}{ExtrQuery# [5000:1:37:0:0:100000:1] sh# 257 sz# 99743 c# 37}{ExtrQuery# [5000:1:38:0:0:100000:1] sh# 257 sz# 99743 c# 38}{ExtrQuery# [5000:1:39:0:0:100000:1] sh# 257 sz# 99743 c# 39}{ExtrQuery# [5000:1:40:0:0:100000:1] sh# 257 sz# 99743 c# 40}{ExtrQuery# [5000:1:41:0:0:100000:1] sh# 257 sz# 99743 c# 41}{ExtrQuery# [5000:1:42:0:0:100000:1] sh# 257 sz# 99743 c# 42}{ExtrQuery# [5000:1:43:0:0:100000:1] sh# 257 sz# 99743 c# 43}{ExtrQuery# [5000:1:44:0:0:100000:1] sh# 257 sz# 99743 c# 44}{ExtrQuery# [5000:1:45:0:0:100000:1] sh# 257 sz# 99743 c# 45}{ExtrQuery# [5000:1:46:0:0:100000:1] sh# 257 sz# 99743 c# 46}{ExtrQuery# [5000:1:47:0:0:100000:1] sh# 257 sz# 99743 c# 47}{ExtrQuery# [5000:1:48:0:0:100000:1] sh# 257 sz# 99743 c# 48}{ExtrQuery# 
[5000:1:49:0:0:100000:1] sh# 257 sz# 99743 c# 49}{ExtrQuery# [5000:1:50:0:0:100000:1] sh# 257 sz# 99743 c# 50}{ExtrQuery# [5000:1:51:0:0:100000:1] sh# 257 sz# 99743 c# 51}{ExtrQuery# [5000:1:52:0:0:100000:1] sh# 257 sz# 99743 c# 52}{ExtrQuery# [5000:1:53:0:0:100000:1] sh# 257 sz# 99743 c# 53}{ExtrQuery# [5000:1:54:0:0:100000:1] sh# 257 sz# 99743 c# 54}{ExtrQuery# [5000:1:55:0:0:100000:1] sh# 257 sz# 99743 c# 55}{ExtrQuery# [5000:1:56:0:0:100000:1] sh# 257 sz# 99743 c# 56}{ExtrQuery# [5000:1:57:0:0:100000:1] sh# 257 sz# 99743 c# 57}{ExtrQuery# [5000:1:58:0:0:100000:1] sh# 257 sz# 99743 c# 58}{ExtrQuery# [5000:1:59:0:0:100000:1] sh# 257 sz# 99743 c# 59}{ExtrQuery# [5000:1:60:0:0:100000:1] sh# 257 sz# 99743 c# 60}{ExtrQuery# [5000:1:61:0:0:100000:1] sh# 257 sz# 99743 c# 61}{ExtrQuery# [5000:1:62:0:0:100000:1] sh# 257 sz# 99743 c# 62}{ExtrQuery# [5000:1:63:0:0:100000:1] sh# 257 sz# 99743 c# 63}{ExtrQuery# [5000:1:64:0:0:100000:1] sh# 257 sz# 99743 c# 64}{ExtrQuery# [5000:1:65:0:0:100000:1] sh# 257 sz# 99743 c# 65}{ExtrQuery# [5000:1:66:0:0:100000:1] sh# 257 sz# 99743 c# 66}{ExtrQuery# [5000:1:67:0:0:100000:1] sh# 257 sz# 99743 c# 67}{ExtrQuery# [5000:1:68:0:0:100000:1] sh# 257 sz# 99743 c# 68}{ExtrQuery# [5000:1:69:0:0:100000:1] sh# 257 sz# 99743 c# 69}{ExtrQuery# [5000:1:70:0:0:100000:1] sh# 257 sz# 99743 c# 70}{ExtrQuery# [5000:1:71:0:0:100000:1] sh# 257 sz# 99743 c# 71}{ExtrQuery# [5000:1:72:0:0:100000:1] sh# 257 sz# 99743 c# 72}{ExtrQuery# [5000:1:73:0:0:100000:1] sh# 257 sz# 99743 c# 73}{ExtrQuery# [5000:1:74:0:0:100000:1] sh# 257 sz# 99743 c# 74}{ExtrQuery# [5000:1:75:0:0:100000:1] sh# 257 sz# 99743 c# 75}{ExtrQuery# [5000:1:76:0:0:100000:1] sh# 257 sz# 99743 c# 76}{ExtrQuery# [5000:1:77:0:0:100000:1] sh# 257 sz# 99743 c# 77}{ExtrQuery# [5000:1:78:0:0:100000:1] sh# 257 sz# 99743 c# 78}{ExtrQuery# [5000:1:79:0:0:100000:1] sh# 257 sz# 99743 c# 79}{ExtrQuery# [5000:1:80:0:0:100000:1] sh# 257 sz# 99743 c# 80}{ExtrQuery# [5000:1:81:0:0:100000:1] sh# 257 sz# 99743 c# 81}{ExtrQuery# [5000:1:82:0:0:100000:1] sh# 257 sz# 99743 c# 82}{ExtrQuery# [5000:1:83:0:0:100000:1] sh# 257 sz# 99743 c# 83}{ExtrQuery# [5000:1:84:0:0:100000:1] sh# 257 sz# 99743 c# 84}{ExtrQuery# [5000:1:85:0:0:100000:1] sh# 257 sz# 99743 c# 85}{ExtrQuery# [5000:1:86:0:0:100000:1] sh# 257 sz# 99743 c# 86}{ExtrQuery# [5000:1:87:0:0:100000:1] sh# 257 sz# 99743 c# 87}{ExtrQuery# [5000:1:88:0:0:100000:1] sh# 257 sz# 99743 c# 88}{ExtrQuery# [5000:1:89:0:0:100000:1] sh# 257 sz# 99743 c# 89}{ExtrQuery# [5000:1:90:0:0:100000:1] sh# 257 sz# 99743 c# 90}{ExtrQuery# [5000:1:91:0:0:100000:1] sh# 257 sz# 99743 c# 91}{ExtrQuery# [5000:1:92:0:0:100000:1] sh# 257 sz# 99743 c# 92}{ExtrQuery# [5000:1:93:0:0:100000:1] sh# 257 sz# 99743 c# 93}{ExtrQuery# [5000:1:94:0:0:100000:1] sh# 257 sz# 99743 c# 94}{ExtrQuery# [5000:1:95:0:0:100000:1] sh# 257 sz# 99743 c# 95}{ExtrQuery# [5000:1:96:0:0:100000:1] sh# 257 sz# 99743 c# 96}{ExtrQuery# [5000:1:97:0:0:100000:1] sh# 257 sz# 99743 c# 97}{ExtrQuery# [5000:1:98:0:0:100000:1] sh# 257 sz# 99743 c# 98}{ExtrQuery# [5000:1:99:0:0:100000:1] sh# 257 sz# 99743 c# 99}{ExtrQuery# [5000:1:100:0:0:100000:1] sh# 257 sz# 99743 c# 100}{ExtrQuery# [5000:1:101:0:0:100000:1] sh# 257 sz# 99743 c# 101}{ExtrQuery# [5000:1:102:0:0:100000:1] sh# 257 sz# 99743 c# 102}{ExtrQuery# [5000:1:103:0:0:100000:1] sh# 257 sz# 99743 c# 103}{ExtrQuery# [5000:1:104:0:0:100000:1] sh# 257 sz# 99743 c# 104}{ExtrQuery# [5000:1:105:0:0:100000:1] sh# 257 sz# 99743 c# 105}{ExtrQuery# [5000:1:106:0:0:100000:1] sh# 257 sz# 99743 c# 106}{ExtrQuery# 
[5000:1:107:0:0:100000:1] sh# 257 sz# 99743 c# 107}{ExtrQuery# [5000:1:108:0:0:100000:1] sh# 257 sz# 99743 c# 108}{ExtrQuery# [5000:1:109:0:0:100000:1] sh# 257 sz# 99743 c# 109}{ExtrQuery# [5000:1:110:0:0:100000:1] sh# 257 sz# 99743 c# 110}{ExtrQuery# [5000:1:111:0:0:100000:1] sh# 257 sz# 99743 c# 111}{ExtrQuery# [5000:1:112:0:0:100000:1] sh# 257 sz# 99743 c# 112}{ExtrQuery# [5000:1:113:0:0:100000:1] sh# 257 sz# 99743 c# 113}{ExtrQuery# [5000:1:114:0:0:100000:1] sh# 257 sz# 99743 c# 114}{ExtrQuery# [5000:1:115:0:0:100000:1] sh# 257 sz# 99743 c# 115}{ExtrQuery# [5000:1:116:0:0:100000:1] sh# 257 sz# 99743 c# 116}{ExtrQuery# [5000:1:117:0:0:100000:1] sh# 257 sz# 99743 c# 117}{ExtrQuery# [5000:1:118:0:0:100000:1] sh# 257 sz# 99743 c# 118}{ExtrQuery# [5000:1:119:0:0:100000:1] sh# 257 sz# 99743 c# 119}{ExtrQuery# [5000:1:120:0:0:100000:1] sh# 257 sz# 99743 c# 120}{ExtrQuery# [5000:1:121:0:0:100000:1] sh# 257 sz# 99743 c# 121}{ExtrQuery# [5000:1:122:0:0:100000:1] sh# 257 sz# 99743 c# 122}{ExtrQuery# [5000:1:123:0:0:100000:1] sh# 257 sz# 99743 c# 123}{ExtrQuery# [5000:1:124:0:0:100000:1] sh# 257 sz# 99743 c# 124}{ExtrQuery# [5000:1:125:0:0:100000:1] sh# 257 sz# 99743 c# 125}{ExtrQuery# [5000:1:126:0:0:100000:1] sh# 257 sz# 99743 c# 126}{ExtrQuery# [5000:1:127:0:0:100000:1] sh# 257 sz# 99743 c# 127}{ExtrQuery# [5000:1:128:0:0:100000:1] sh# 257 sz# 99743 c# 128}{ExtrQuery# [5000:1:129:0:0:100000:1] sh# 257 sz# 99743 c# 129}{ExtrQuery# [5000:1:130:0:0:100000:1] sh# 257 sz# 99743 c# 130}{ExtrQuery# [5000:1:131:0:0:100000:1] sh# 257 sz# 99743 c# 131}{ExtrQuery# [5000:1:132:0:0:100000:1] sh# 257 sz# 99743 c# 132}{ExtrQuery# [5000:1:133:0:0:100000:1] sh# 257 sz# 99743 c# 133}{ExtrQuery# [5000:1:134:0:0:100000:1] sh# 257 sz# 99743 c# 134}{ExtrQuery# [5000:1:135:0:0:100000:1] sh# 257 sz# 99743 c# 135}{ExtrQuery# [5000:1:136:0:0:100000:1] sh# 257 sz# 99743 c# 136}{ExtrQuery# [5000:1:137:0:0:100000:1] sh# 257 sz# 99743 c# 137}{ExtrQuery# [5000:1:138:0:0:100000:1] sh# 257 sz# 99743 c# 138}{ExtrQuery# [5000:1:139:0:0:100000:1] sh# 257 sz# 99743 c# 139}{ExtrQuery# [5000:1:140:0:0:100000:1] sh# 257 sz# 99743 c# 140}{ExtrQuery# [5000:1:141:0:0:100000:1] sh# 257 sz# 99743 c# 141}{ExtrQuery# [5000:1:142:0:0:100000:1] sh# 257 sz# 99743 c# 142}{ExtrQuery# [5000:1:143:0:0:100000:1] sh# 257 sz# 99743 c# 143}{ExtrQuery# [5000:1:144:0:0:100000:1] sh# 257 sz# 99743 c# 144}{ExtrQuery# [5000:1:145:0:0:100000:1] sh# 257 sz# 99743 c# 145}{ExtrQuery# [5000:1:146:0:0:100000:1] sh# 257 sz# 99743 c# 146}{ExtrQuery# [5000:1:147:0:0:100000:1] sh# 257 sz# 99743 c# 147}{ExtrQuery# [5000:1:148:0:0:100000:1] sh# 257 sz# 99743 c# 148}{ExtrQuery# [5000:1:149:0:0:100000:1] sh# 257 sz# 99743 c# 149}{ExtrQuery# [5000:1:150:0:0:100000:1] sh# 257 sz# 99743 c# 150}{ExtrQuery# [5000:1:151:0:0:100000:1] sh# 257 sz# 99743 c# 151}{ExtrQuery# [5000:1:152:0:0:100000:1] sh# 257 sz# 99743 c# 152}{ExtrQuery# [5000:1:153:0:0:100000:1] sh# 257 sz# 99743 c# 153}{ExtrQuery# [5000:1:154:0:0:100000:1] sh# 257 sz# 99743 c# 154}{ExtrQuery# [5000:1:155:0:0:100000:1] sh# 257 sz# 99743 c# 155}{ExtrQuery# [5000:1:156:0:0:100000:1] sh# 257 sz# 99743 c# 156}{ExtrQuery# [5000:1:157:0:0:100000:1] sh# 257 sz# 99743 c# 157}{ExtrQuery# [5000:1:158:0:0:100000:1] sh# 257 sz# 99743 c# 158}{ExtrQuery# [5000:1:159:0:0:100000:1] sh# 257 sz# 99743 c# 159}{ExtrQuery# [5000:1:160:0:0:100000:1] sh# 257 sz# 99743 c# 160}{ExtrQuery# [5000:1:161:0:0:100000:1] sh# 257 sz# 99743 c# 161}{ExtrQuery# [5000:1:162:0:0:100000:1] sh# 257 sz# 99743 c# 162}{ExtrQuery# [5000:1:163:0:0:100000:1] 
sh# 257 sz# 99743 c# 163}{ExtrQuery# [5000:1:164:0:0:100000:1] sh# 257 sz# 99743 c# 164}{ExtrQuery# [5000:1:165:0:0:100000:1] sh# 257 sz# 99743 c# 165}{ExtrQuery# [5000:1:166:0:0:100000:1] sh# 257 sz# 99743 c# 166}{ExtrQuery# [5000:1:167:0:0:100000:1] sh# 257 sz# 99743 c# 167}{ExtrQuery# [5000:1:168:0:0:100000:1] sh# 257 sz# 99743 c# 168}{ExtrQuery# [5000:1:169:0:0:100000:1] sh# 257 sz# 99743 c# 169}{ExtrQuery# [5000:1:170:0:0:100000:1] sh# 257 sz# 99743 c# 170}{ExtrQuery# [5000:1:171:0:0:100000:1] sh# 257 sz# 99743 c# 171}{ExtrQuery# [5000:1:172:0:0:100000:1] sh# 257 sz# 99743 c# 172}{ExtrQuery# [5000:1:173:0:0:100000:1] sh# 257 sz# 99743 c# 173}{ExtrQuery# [5000:1:174:0:0:100000:1] sh# 257 sz# 99743 c# 174}{ExtrQuery# [5000:1:175:0:0:100000:1] sh# 257 sz# 99743 c# 175}{ExtrQuery# [5000:1:176:0:0:100000:1] sh# 257 sz# 99743 c# 176}{ExtrQuery# [5000:1:177:0:0:100000:1] sh# 257 sz# 99743 c# 177}{ExtrQuery# [5000:1:178:0:0:100000:1] sh# 257 sz# 99743 c# 178}{ExtrQuery# [5000:1:179:0:0:100000:1] sh# 257 sz# 99743 c# 179}{ExtrQuery# [5000:1:180:0:0:100000:1] sh# 257 sz# 99743 c# 180}{ExtrQuery# [5000:1:181:0:0:100000:1] sh# 257 sz# 99743 c# 181}{ExtrQuery# [5000:1:182:0:0:100000:1] sh# 257 sz# 99743 c# 182}{ExtrQuery# [5000:1:183:0:0:100000:1] sh# 257 sz# 99743 c# 183}{ExtrQuery# [5000:1:184:0:0:100000:1] sh# 257 sz# 99743 c# 184}{ExtrQuery# [5000:1:185:0:0:100000:1] sh# 257 sz# 99743 c# 185}{ExtrQuery# [5000:1:186:0:0:100000:1] sh# 257 sz# 99743 c# 186}{ExtrQuery# [5000:1:187:0:0:100000:1] sh# 257 sz# 99743 c# 187}{ExtrQuery# [5000:1:188:0:0:100000:1] sh# 257 sz# 99743 c# 188}{ExtrQuery# [5000:1:189:0:0:100000:1] sh# 257 sz# 99743 c# 189}{ExtrQuery# [5000:1:190:0:0:100000:1] sh# 257 sz# 99743 c# 190}{ExtrQuery# [5000:1:191:0:0:100000:1] sh# 257 sz# 99743 ... 
sz# 99743 c# 484}{ExtrQuery# [5000:1:485:0:0:100000:1] sh# 257 sz# 99743 c# 485}{ExtrQuery# [5000:1:486:0:0:100000:1] sh# 257 sz# 99743 c# 486}{ExtrQuery# [5000:1:487:0:0:100000:1] sh# 257 sz# 99743 c# 487}{ExtrQuery# [5000:1:488:0:0:100000:1] sh# 257 sz# 99743 c# 488}{ExtrQuery# [5000:1:489:0:0:100000:1] sh# 257 sz# 99743 c# 489}{ExtrQuery# [5000:1:490:0:0:100000:1] sh# 257 sz# 99743 c# 490}{ExtrQuery# [5000:1:491:0:0:100000:1] sh# 257 sz# 99743 c# 491}{ExtrQuery# [5000:1:492:0:0:100000:1] sh# 257 sz# 99743 c# 492}{ExtrQuery# [5000:1:493:0:0:100000:1] sh# 257 sz# 99743 c# 493}{ExtrQuery# [5000:1:494:0:0:100000:1] sh# 257 sz# 99743 c# 494}{ExtrQuery# [5000:1:495:0:0:100000:1] sh# 257 sz# 99743 c# 495}{ExtrQuery# [5000:1:496:0:0:100000:1] sh# 257 sz# 99743 c# 496}{ExtrQuery# [5000:1:497:0:0:100000:1] sh# 257 sz# 99743 c# 497}{ExtrQuery# [5000:1:498:0:0:100000:1] sh# 257 sz# 99743 c# 498}{ExtrQuery# [5000:1:499:0:0:100000:1] sh# 257 sz# 99743 c# 499}{ExtrQuery# [5000:1:500:0:0:100000:1] sh# 257 sz# 99743 c# 500}{ExtrQuery# [5000:1:501:0:0:100000:1] sh# 257 sz# 99743 c# 501}{ExtrQuery# [5000:1:502:0:0:100000:1] sh# 257 sz# 99743 c# 502}{ExtrQuery# [5000:1:503:0:0:100000:1] sh# 257 sz# 99743 c# 503}{ExtrQuery# [5000:1:504:0:0:100000:1] sh# 257 sz# 99743 c# 504}{ExtrQuery# [5000:1:505:0:0:100000:1] sh# 257 sz# 99743 c# 505}{ExtrQuery# [5000:1:506:0:0:100000:1] sh# 257 sz# 99743 c# 506}{ExtrQuery# [5000:1:507:0:0:100000:1] sh# 257 sz# 99743 c# 507}{ExtrQuery# [5000:1:508:0:0:100000:1] sh# 257 sz# 99743 c# 508}{ExtrQuery# [5000:1:509:0:0:100000:1] sh# 257 sz# 99743 c# 509}{ExtrQuery# [5000:1:510:0:0:100000:1] sh# 257 sz# 99743 c# 510}{ExtrQuery# [5000:1:511:0:0:100000:1] sh# 257 sz# 99743 c# 511}{ExtrQuery# [5000:1:512:0:0:100000:1] sh# 257 sz# 99743 c# 512}{ExtrQuery# [5000:1:513:0:0:100000:1] sh# 257 sz# 99743 c# 513}{ExtrQuery# [5000:1:514:0:0:100000:1] sh# 257 sz# 99743 c# 514}{ExtrQuery# [5000:1:515:0:0:100000:1] sh# 257 sz# 99743 c# 515}{ExtrQuery# [5000:1:516:0:0:100000:1] sh# 257 sz# 99743 c# 516}{ExtrQuery# [5000:1:517:0:0:100000:1] sh# 257 sz# 99743 c# 517}{ExtrQuery# [5000:1:518:0:0:100000:1] sh# 257 sz# 99743 c# 518}{ExtrQuery# [5000:1:519:0:0:100000:1] sh# 257 sz# 99743 c# 519}{ExtrQuery# [5000:1:520:0:0:100000:1] sh# 257 sz# 99743 c# 520}{ExtrQuery# [5000:1:521:0:0:100000:1] sh# 257 sz# 99743 c# 521}{ExtrQuery# [5000:1:522:0:0:100000:1] sh# 257 sz# 99743 c# 522}{ExtrQuery# [5000:1:523:0:0:100000:1] sh# 257 sz# 99743 c# 523}{ExtrQuery# [5000:1:524:0:0:100000:1] sh# 257 sz# 99743 c# 524}{ExtrQuery# [5000:1:525:0:0:100000:1] sh# 257 sz# 99743 c# 525}{ExtrQuery# [5000:1:526:0:0:100000:1] sh# 257 sz# 99743 c# 526}{ExtrQuery# [5000:1:527:0:0:100000:1] sh# 257 sz# 99743 c# 527}{ExtrQuery# [5000:1:528:0:0:100000:1] sh# 257 sz# 99743 c# 528}{ExtrQuery# [5000:1:529:0:0:100000:1] sh# 257 sz# 99743 c# 529}{ExtrQuery# [5000:1:530:0:0:100000:1] sh# 257 sz# 99743 c# 530}{ExtrQuery# [5000:1:531:0:0:100000:1] sh# 257 sz# 99743 c# 531}{ExtrQuery# [5000:1:532:0:0:100000:1] sh# 257 sz# 99743 c# 532}{ExtrQuery# [5000:1:533:0:0:100000:1] sh# 257 sz# 99743 c# 533}{ExtrQuery# [5000:1:534:0:0:100000:1] sh# 257 sz# 99743 c# 534}{ExtrQuery# [5000:1:535:0:0:100000:1] sh# 257 sz# 99743 c# 535}{ExtrQuery# [5000:1:536:0:0:100000:1] sh# 257 sz# 99743 c# 536}{ExtrQuery# [5000:1:537:0:0:100000:1] sh# 257 sz# 99743 c# 537}{ExtrQuery# [5000:1:538:0:0:100000:1] sh# 257 sz# 99743 c# 538}{ExtrQuery# [5000:1:539:0:0:100000:1] sh# 257 sz# 99743 c# 539}{ExtrQuery# [5000:1:540:0:0:100000:1] sh# 257 sz# 99743 c# 
540}{ExtrQuery# [5000:1:541:0:0:100000:1] sh# 257 sz# 99743 c# 541}{ExtrQuery# [5000:1:542:0:0:100000:1] sh# 257 sz# 99743 c# 542}{ExtrQuery# [5000:1:543:0:0:100000:1] sh# 257 sz# 99743 c# 543}{ExtrQuery# [5000:1:544:0:0:100000:1] sh# 257 sz# 99743 c# 544}{ExtrQuery# [5000:1:545:0:0:100000:1] sh# 257 sz# 99743 c# 545}{ExtrQuery# [5000:1:546:0:0:100000:1] sh# 257 sz# 99743 c# 546}{ExtrQuery# [5000:1:547:0:0:100000:1] sh# 257 sz# 99743 c# 547}{ExtrQuery# [5000:1:548:0:0:100000:1] sh# 257 sz# 99743 c# 548}{ExtrQuery# [5000:1:549:0:0:100000:1] sh# 257 sz# 99743 c# 549}{ExtrQuery# [5000:1:550:0:0:100000:1] sh# 257 sz# 99743 c# 550}{ExtrQuery# [5000:1:551:0:0:100000:1] sh# 257 sz# 99743 c# 551}{ExtrQuery# [5000:1:552:0:0:100000:1] sh# 257 sz# 99743 c# 552}{ExtrQuery# [5000:1:553:0:0:100000:1] sh# 257 sz# 99743 c# 553}{ExtrQuery# [5000:1:554:0:0:100000:1] sh# 257 sz# 99743 c# 554}{ExtrQuery# [5000:1:555:0:0:100000:1] sh# 257 sz# 99743 c# 555}{ExtrQuery# [5000:1:556:0:0:100000:1] sh# 257 sz# 99743 c# 556}{ExtrQuery# [5000:1:557:0:0:100000:1] sh# 257 sz# 99743 c# 557}{ExtrQuery# [5000:1:558:0:0:100000:1] sh# 257 sz# 99743 c# 558}{ExtrQuery# [5000:1:559:0:0:100000:1] sh# 257 sz# 99743 c# 559}{ExtrQuery# [5000:1:560:0:0:100000:1] sh# 257 sz# 99743 c# 560}{ExtrQuery# [5000:1:561:0:0:100000:1] sh# 257 sz# 99743 c# 561}{ExtrQuery# [5000:1:562:0:0:100000:1] sh# 257 sz# 99743 c# 562}{ExtrQuery# [5000:1:563:0:0:100000:1] sh# 257 sz# 99743 c# 563}{ExtrQuery# [5000:1:564:0:0:100000:1] sh# 257 sz# 99743 c# 564}{ExtrQuery# [5000:1:565:0:0:100000:1] sh# 257 sz# 99743 c# 565}{ExtrQuery# [5000:1:566:0:0:100000:1] sh# 257 sz# 99743 c# 566}{ExtrQuery# [5000:1:567:0:0:100000:1] sh# 257 sz# 99743 c# 567}{ExtrQuery# [5000:1:568:0:0:100000:1] sh# 257 sz# 99743 c# 568}{ExtrQuery# [5000:1:569:0:0:100000:1] sh# 257 sz# 99743 c# 569}{ExtrQuery# [5000:1:570:0:0:100000:1] sh# 257 sz# 99743 c# 570}{ExtrQuery# [5000:1:571:0:0:100000:1] sh# 257 sz# 99743 c# 571}{ExtrQuery# [5000:1:572:0:0:100000:1] sh# 257 sz# 99743 c# 572}{ExtrQuery# [5000:1:573:0:0:100000:1] sh# 257 sz# 99743 c# 573}{ExtrQuery# [5000:1:574:0:0:100000:1] sh# 257 sz# 99743 c# 574}{ExtrQuery# [5000:1:575:0:0:100000:1] sh# 257 sz# 99743 c# 575}{ExtrQuery# [5000:1:576:0:0:100000:1] sh# 257 sz# 99743 c# 576}{ExtrQuery# [5000:1:577:0:0:100000:1] sh# 257 sz# 99743 c# 577}{ExtrQuery# [5000:1:578:0:0:100000:1] sh# 257 sz# 99743 c# 578}{ExtrQuery# [5000:1:579:0:0:100000:1] sh# 257 sz# 99743 c# 579}{ExtrQuery# [5000:1:580:0:0:100000:1] sh# 257 sz# 99743 c# 580}{ExtrQuery# [5000:1:581:0:0:100000:1] sh# 257 sz# 99743 c# 581}{ExtrQuery# [5000:1:582:0:0:100000:1] sh# 257 sz# 99743 c# 582}{ExtrQuery# [5000:1:583:0:0:100000:1] sh# 257 sz# 99743 c# 583}{ExtrQuery# [5000:1:584:0:0:100000:1] sh# 257 sz# 99743 c# 584}{ExtrQuery# [5000:1:585:0:0:100000:1] sh# 257 sz# 99743 c# 585}{ExtrQuery# [5000:1:586:0:0:100000:1] sh# 257 sz# 99743 c# 586}{ExtrQuery# [5000:1:587:0:0:100000:1] sh# 257 sz# 99743 c# 587}{ExtrQuery# [5000:1:588:0:0:100000:1] sh# 257 sz# 99743 c# 588}{ExtrQuery# [5000:1:589:0:0:100000:1] sh# 257 sz# 99743 c# 589}{ExtrQuery# [5000:1:590:0:0:100000:1] sh# 257 sz# 99743 c# 590}{ExtrQuery# [5000:1:591:0:0:100000:1] sh# 257 sz# 99743 c# 591}{ExtrQuery# [5000:1:592:0:0:100000:1] sh# 257 sz# 99743 c# 592}{ExtrQuery# [5000:1:593:0:0:100000:1] sh# 257 sz# 99743 c# 593}{ExtrQuery# [5000:1:594:0:0:100000:1] sh# 257 sz# 99743 c# 594}{ExtrQuery# [5000:1:595:0:0:100000:1] sh# 257 sz# 99743 c# 595}{ExtrQuery# [5000:1:596:0:0:100000:1] sh# 257 sz# 99743 c# 596}{ExtrQuery# 
[5000:1:597:0:0:100000:1] sh# 257 sz# 99743 c# 597}{ExtrQuery# [5000:1:598:0:0:100000:1] sh# 257 sz# 99743 c# 598}{ExtrQuery# [5000:1:599:0:0:100000:1] sh# 257 sz# 99743 c# 599}{ExtrQuery# [5000:1:600:0:0:100000:1] sh# 257 sz# 99743 c# 600}{ExtrQuery# [5000:1:601:0:0:100000:1] sh# 257 sz# 99743 c# 601}{ExtrQuery# [5000:1:602:0:0:100000:1] sh# 257 sz# 99743 c# 602}{ExtrQuery# [5000:1:603:0:0:100000:1] sh# 257 sz# 99743 c# 603}{ExtrQuery# [5000:1:604:0:0:100000:1] sh# 257 sz# 99743 c# 604}{ExtrQuery# [5000:1:605:0:0:100000:1] sh# 257 sz# 99743 c# 605}{ExtrQuery# [5000:1:606:0:0:100000:1] sh# 257 sz# 99743 c# 606}{ExtrQuery# [5000:1:607:0:0:100000:1] sh# 257 sz# 99743 c# 607}{ExtrQuery# [5000:1:608:0:0:100000:1] sh# 257 sz# 99743 c# 608}{ExtrQuery# [5000:1:609:0:0:100000:1] sh# 257 sz# 99743 c# 609}{ExtrQuery# [5000:1:610:0:0:100000:1] sh# 257 sz# 99743 c# 610}{ExtrQuery# [5000:1:611:0:0:100000:1] sh# 257 sz# 99743 c# 611}{ExtrQuery# [5000:1:612:0:0:100000:1] sh# 257 sz# 99743 c# 612}{ExtrQuery# [5000:1:613:0:0:100000:1] sh# 257 sz# 99743 c# 613}{ExtrQuery# [5000:1:614:0:0:100000:1] sh# 257 sz# 99743 c# 614}{ExtrQuery# [5000:1:615:0:0:100000:1] sh# 257 sz# 99743 c# 615}{ExtrQuery# [5000:1:616:0:0:100000:1] sh# 257 sz# 99743 c# 616}{ExtrQuery# [5000:1:617:0:0:100000:1] sh# 257 sz# 99743 c# 617}{ExtrQuery# [5000:1:618:0:0:100000:1] sh# 257 sz# 99743 c# 618}{ExtrQuery# [5000:1:619:0:0:100000:1] sh# 257 sz# 99743 c# 619}{ExtrQuery# [5000:1:620:0:0:100000:1] sh# 257 sz# 99743 c# 620}{ExtrQuery# [5000:1:621:0:0:100000:1] sh# 257 sz# 99743 c# 621}{ExtrQuery# [5000:1:622:0:0:100000:1] sh# 257 sz# 99743 c# 622}{ExtrQuery# [5000:1:623:0:0:100000:1] sh# 257 sz# 99743 c# 623}{ExtrQuery# [5000:1:624:0:0:100000:1] sh# 257 sz# 99743 c# 624}{ExtrQuery# [5000:1:625:0:0:100000:1] sh# 257 sz# 99743 c# 625}{ExtrQuery# [5000:1:626:0:0:100000:1] sh# 257 sz# 99743 c# 626}{ExtrQuery# [5000:1:627:0:0:100000:1] sh# 257 sz# 99743 c# 627}{ExtrQuery# [5000:1:628:0:0:100000:1] sh# 257 sz# 99743 c# 628}{ExtrQuery# [5000:1:629:0:0:100000:1] sh# 257 sz# 99743 c# 629}{ExtrQuery# [5000:1:630:0:0:100000:1] sh# 257 sz# 99743 c# 630}{ExtrQuery# [5000:1:631:0:0:100000:1] sh# 257 sz# 99743 c# 631}{ExtrQuery# [5000:1:632:0:0:100000:1] sh# 257 sz# 99743 c# 632}{ExtrQuery# [5000:1:633:0:0:100000:1] sh# 257 sz# 99743 c# 633}{ExtrQuery# [5000:1:634:0:0:100000:1] sh# 257 sz# 99743 c# 634}{ExtrQuery# [5000:1:635:0:0:100000:1] sh# 257 sz# 99743 c# 635}{ExtrQuery# [5000:1:636:0:0:100000:1] sh# 257 sz# 99743 c# 636}{ExtrQuery# [5000:1:637:0:0:100000:1] sh# 257 sz# 99743 c# 637}{ExtrQuery# [5000:1:638:0:0:100000:1] sh# 257 sz# 99743 c# 638}{ExtrQuery# [5000:1:639:0:0:100000:1] sh# 257 sz# 99743 c# 639}{ExtrQuery# [5000:1:640:0:0:100000:1] sh# 257 sz# 99743 c# 640}{ExtrQuery# [5000:1:641:0:0:100000:1] sh# 257 sz# 99743 c# 641}{ExtrQuery# [5000:1:642:0:0:100000:1] sh# 257 sz# 99743 c# 642}{ExtrQuery# [5000:1:643:0:0:100000:1] sh# 257 sz# 99743 c# 643}{ExtrQuery# [5000:1:644:0:0:100000:1] sh# 257 sz# 99743 c# 644}{ExtrQuery# [5000:1:645:0:0:100000:1] sh# 257 sz# 99743 c# 645}{ExtrQuery# [5000:1:646:0:0:100000:1] sh# 257 sz# 99743 c# 646}{ExtrQuery# [5000:1:647:0:0:100000:1] sh# 257 sz# 99743 c# 647}{ExtrQuery# [5000:1:648:0:0:100000:1] sh# 257 sz# 99743 c# 648}{ExtrQuery# [5000:1:649:0:0:100000:1] sh# 257 sz# 99743 c# 649}{ExtrQuery# [5000:1:650:0:0:100000:1] sh# 257 sz# 99743 c# 650}{ExtrQuery# [5000:1:651:0:0:100000:1] sh# 257 sz# 99743 c# 651}{ExtrQuery# [5000:1:652:0:0:100000:1] sh# 257 sz# 99743 c# 652}{ExtrQuery# [5000:1:653:0:0:100000:1] 
sh# 257 sz# 99743 c# 653}{ExtrQuery# [5000:1:654:0:0:100000:1] sh# 257 sz# 99743 c# 654}{ExtrQuery# [5000:1:655:0:0:100000:1] sh# 257 sz# 99743 c# 655}{ExtrQuery# [5000:1:656:0:0:100000:1] sh# 257 sz# 99743 c# 656}{ExtrQuery# [5000:1:657:0:0:100000:1] sh# 257 sz# 99743 c# 657}{ExtrQuery# [5000:1:658:0:0:100000:1] sh# 257 sz# 99743 c# 658}{ExtrQuery# [5000:1:659:0:0:100000:1] sh# 257 sz# 99743 c# 659}{ExtrQuery# [5000:1:660:0:0:100000:1] sh# 257 sz# 99743 c# 660}{ExtrQuery# [5000:1:661:0:0:100000:1] sh# 257 sz# 99743 c# 661}{ExtrQuery# [5000:1:662:0:0:100000:1] sh# 257 sz# 99743 c# 662}{ExtrQuery# [5000:1:663:0:0:100000:1] sh# 257 sz# 99743 c# 663}{ExtrQuery# [5000:1:664:0:0:100000:1] sh# 257 sz# 99743 c# 664}{ExtrQuery# [5000:1:665:0:0:100000:1] sh# 257 sz# 99743 c# 665}{ExtrQuery# [5000:1:666:0:0:100000:1] sh# 257 sz# 99743 c# 666}{ExtrQuery# [5000:1:667:0:0:100000:1] sh# 257 sz# 99743 c# 667}{ExtrQuery# [5000:1:668:0:0:100000:1] sh# 257 sz# 99743 c# 668}{ExtrQuery# [5000:1:669:0:0:100000:1] sh# 257 sz# 99743 c# 669}{ExtrQuery# [5000:1:670:0:0:100000:1] sh# 257 sz# 99743 c# 670}{ExtrQuery# [5000:1:671:0:0:100000:1] sh# 257 sz# 99743 c# 671}{ExtrQuery# [5000:1:672:0:0:17027:1] sh# 257 sz# 16770 c# 672} {MsgQoS} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0}; VDISK CAN NOT REPLY ON TEvVGet REQUEST >> TBlobStorageProxyTest::TestEmptyDiscover >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> TBlobStorageProxyTest::TestDoubleGroups >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> KqpScan::ScanPg [GOOD] >> DataStreams::ListStreamsValidation [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> TBlobStorageProxyTest::TestDoubleEmptyGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2024-11-21T23:05:53.848577Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:05:54.077963Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:05:54.143064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:05:54.143441Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:05:54.144534Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:05:54.190021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2024-11-21T23:05:54.190317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:54.190519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:54.190704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2024-11-21T23:05:54.190802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:54.190926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:54.191054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:54.191164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:54.191280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:54.191407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:54.191527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:54.191654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:54.191759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:54.211889Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:54.221839Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:05:54.222235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2024-11-21T23:05:54.222330Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2024-11-21T23:05:54.222660Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:283;tasks_for_remove=0; 2024-11-21T23:05:54.223021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:54.223158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 
2024-11-21T23:05:54.223200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2024-11-21T23:05:54.224166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:54.224741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:05:54.225114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:05:54.225243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2024-11-21T23:05:54.225603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:05:54.225761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:05:54.225975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:05:54.226003Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2024-11-21T23:05:54.226924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:54.227218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:05:54.227255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:05:54.227389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2024-11-21T23:05:54.228018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:05:54.228184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:05:54.228327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:05:54.228353Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:05:54.229716Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:05:54.229841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:05:54.229872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:05:54.229913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:05:54.229946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:05:54.230043Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:05:54.230442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=70; 2024-11-21T23:05:54.230543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2024-11-21T23:05:54.230622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2024-11-21T23:05:54.230694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2024-11-21T23:05:54.230842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:05:54.230892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:05:54.230921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:05:54.231086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:05:54.231121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:05:54.231149Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:05:54.231280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CL ... 
names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.158837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T23:06:13.158871Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:06:13.158987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T23:06:13.161480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.161513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T23:06:13.161539Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:06:13.170596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T23:06:13.170964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T23:06:13.171305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=2; 2024-11-21T23:06:13.171902Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2024-11-21T23:06:13.172419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T23:06:13.176945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.176987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2024-11-21T23:06:13.177024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce 
result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:06:13.183995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:06:13.185343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.186760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:06:13.188751Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2024-11-21T23:06:13.189517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2024-11-21T23:06:13.190259Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:361:2365] send ScanData to [1:359:2364] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 2405760 rows: 20048 page faults: 0 finished: 0 pageFault: 0 arrow schema: key1: uint64 key2: uint64 field: string 2024-11-21T23:06:13.214183Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.214697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.215464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.217265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:06:13.231652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.234600Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2024-11-21T23:06:13.234658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:361:2365] finished for tablet 9437184 2024-11-21T23:06:13.235397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:361:2365] send ScanData to [1:359:2364] txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:06:13.252012Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:361:2365] and sent to [1:359:2364] packs: 0 txId: 111 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0.002},{"events":["f_ProduceResults"],"t":0.044},{"events":["l_bootstrap"],"t":0.083},{"events":["f_processing"],"t":0.106},{"events":["f_task_result"],"t":0.107},{"events":["l_task_result"],"t":1.49},{"events":["f_ack"],"t":1.491},{"events":["l_ProduceResults","f_Finish"],"t":1.541},{"events":["l_ack","l_processing","l_Finish"],"t":1.543}],"full":{"a":1732230371692947,"name":"_full_task","f":1732230371692947,"d_finished":0,"c":0,"l":1732230373236166,"d":1543219},"events":[{"name":"bootstrap","f":1732230371695635,"d_finished":80375,"c":1,"l":1732230371776010,"d":80375},{"a":1732230373216901,"name":"ack","f":1732230373183966,"d_finished":31857,"c":1,"l":1732230373215823,"d":51122},{"a":1732230373216882,"name":"processing","f":1732230371799683,"d_finished":1326793,"c":9,"l":1732230373215826,"d":1346077},{"name":"ProduceResults","f":1732230371737623,"d_finished":70387,"c":12,"l":1732230373234637,"d":70387},{"a":1732230373234641,"name":"Finish","f":1732230373234641,"d_finished":0,"c":0,"l":1732230373236166,"d":1525},{"name":"task_result","f":1732230371799972,"d_finished":1294519,"c":8,"l":1732230373183802,"d":1294519}],"id":"9437184::2"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;) 2024-11-21T23:06:13.252135Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:06:11.686303Z;index_granules=0;index_portions=1;index_batches=1;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=258720;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=258720;selected_rows=0; 2024-11-21T23:06:13.253709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:06:13.256464Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.096680s;steps_10Ms:[{name=ASSEMBLER::SPEC;duration=0.013328s;size=0.000320768;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.070835s;size=0.000320768;details={columns=(column_ids=1,2;column_names=key1,key2;);;};};]};; 2024-11-21T23:06:13.257978Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:361:2365];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |79.4%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2024-11-21T23:04:48.919542Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872436253202476:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:48.919601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c06/r3tmp/tmpk4Zq1m/pdisk_1.dat 2024-11-21T23:04:53.340818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:53.519848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:53.519957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:53.573770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:53.694729Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26852, node 1 2024-11-21T23:04:54.094838Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872436253202476:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:54.104457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:04:54.106212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:54.106229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:04:54.106235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:54.106319Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:04:54.117917Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:04:54.117943Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:04:54.124869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:04:54.127125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:54.127273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:04:54.154650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:54.156172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:54.156204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:54.156398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, 
domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:04:54.156593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:04:59.889214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:59.938130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:59.940005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:59.945222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:04:59.951917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:04:59.952155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:04:59.966617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:04:59.966641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:04:59.983734Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:59.992112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:05:00.031183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230300045, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:05:00.031224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:05:00.031491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:05:00.044457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:05:00.044703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:05:00.044754Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:05:00.044850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:05:00.044885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:05:00.044942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:05:00.047236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:05:00.047276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:05:00.047292Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:05:00.047358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:05:01.359211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:05:01.367258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:05:01.367281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:05:01.367591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:05:01.367909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:05:01.367919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:05:01.420485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:05:01.421346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:05:01.428535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:05:01.433113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:05:01.433150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:05:01.433164Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:05:01.433228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:8590 2024-11-21T23:05:04.978572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:05:04.979741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:05:04.979760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:05:04.992448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:05:04.992584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:05:05.028248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:05:05.045595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230305071, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:05:05.045904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:17 ... 
658146131:7762515]; 2024-11-21T23:06:03.056947Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c06/r3tmp/tmpVqcTWm/pdisk_1.dat 2024-11-21T23:06:03.962621Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:04.085893Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:04.120351Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:04.120704Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:04.140924Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16008, node 10 2024-11-21T23:06:04.776736Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:04.776938Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:04.776949Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:04.777085Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17526 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:06:06.252986Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:06:06.253947Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:06.253966Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:06:06.276937Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:06:06.278041Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:06:06.278057Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T23:06:06.320622Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:06:06.321263Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:06:06.355427Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:06:06.359720Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:06:06.414897Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230366412, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:06:06.414939Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:06:06.415533Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:06:06.470362Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:06.471110Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:06.471172Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:06:06.471257Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:06:06.471300Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:06:06.471867Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:06:06.497594Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:06:06.497657Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:06:06.497678Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2024-11-21T23:06:06.498132Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T23:06:06.741531Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:06:06.741834Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:06.741861Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:06:06.741936Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T23:06:06.742193Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T23:06:06.742216Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 1, subscribers: 0 2024-11-21T23:06:06.746982Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:06:06.747154Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:06.747430Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:06.747939Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:06:06.747973Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:06:06.747994Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:06:06.748077Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 0 2024-11-21T23:06:08.070604Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439872759033128581:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:08.071145Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:17526 2024-11-21T23:06:08.549987Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:06:08.550204Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:08.550226Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:06:08.565088Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , 
status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:06:08.565308Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:08.573297Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T23:06:08.578328Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230368617, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:06:08.578380Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976715659:0, stepId:1732230368617, at schemeshard: 72057594046644480 2024-11-21T23:06:08.578677Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T23:06:08.578770Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T23:06:08.578805Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T23:06:08.586586Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:08.586774Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:08.588777Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:06:08.588827Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:06:08.588847Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:06:08.588929Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 >> BuildStatsHistogram::Ten_Serial >> TBlobStorageProxyTest::TestQuadrupleGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: 2024-11-21T23:04:48.804204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:04:48.854266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:04:48.859383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:48.859685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:04:48.862248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:04:48.862326Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00286b/r3tmp/tmpciP70b/pdisk_1.dat 2024-11-21T23:04:50.343763Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:51.922556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:04:52.288979Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:04:52.313397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:52.314474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:52.324008Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:04:52.332281Z node 2 :TX_PROXY DEBUG: actor# [2:193:2108] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:04:52.336818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:52.336894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:52.337282Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2024-11-21T23:04:52.368070Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:04:52.371086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:52.372984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:52.933124Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Handle TEvProposeTransaction 2024-11-21T23:04:52.933471Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:04:52.934237Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1164:2708] 2024-11-21T23:04:53.081485Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { 
Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:04:53.083222Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:04:53.083370Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:04:53.083846Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:04:53.084048Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:04:53.084175Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T23:04:53.084498Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:04:53.085962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.093768Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:04:53.093871Z node 1 :TX_PROXY DEBUG: Actor# [1:1164:2708] txid# 281474976715657 SEND to# [1:1071:2648] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:04:53.431921Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1217:2356] 2024-11-21T23:04:53.432155Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:53.489690Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:53.489983Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:04:53.491818Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:04:53.492112Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:04:53.492378Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:04:53.494197Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:04:53.576886Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:04:53.577582Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:04:53.578765Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1241:2371] 2024-11-21T23:04:53.579327Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:04:53.579603Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:04:53.579866Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:04:53.582639Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:04:53.582982Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:04:53.584055Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:04:53.584303Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:04:53.584533Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:04:53.584778Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:04:53.664203Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1198:2738], serverId# [2:1245:2372], sessionId# [0:0:0] 2024-11-21T23:04:53.665074Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:04:53.665781Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:04:53.666153Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:04:53.670192Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:04:53.685666Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:04:53.685832Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:04:54.120319Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1273:2759], serverId# [2:1276:2381], sessionId# [0:0:0] 2024-11-21T23:04:54.136662Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 994 RawX2: 4294969882 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:04:54.136771Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:04:54.137140Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:04:54.137196Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:04:54.137253Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:04:54.137581Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:04:54.137765Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:04:54.138877Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:04:54.138969Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:04:54.139534Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 
2024-11-21T23:04:54.140965Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:04:54.146742Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:04:54.146912Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:04:54.168268Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:04:54.171395Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:04:54.171504Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:04:54.171639Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:04:54.176835Z node ... QP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=. TraceId : 01jd8fjyp4ewj83qevy2qr4n4p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll inputs 2024-11-21T23:05:51.587858Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=. TraceId : 01jd8fjyp4ewj83qevy2qr4n4p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Poll sources 2024-11-21T23:05:51.587883Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=. TraceId : 01jd8fjyp4ewj83qevy2qr4n4p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Resume execution, run status: Finished 2024-11-21T23:05:51.587901Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=. TraceId : 01jd8fjyp4ewj83qevy2qr4n4p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. ProcessOutputsState.Inflight: 0 2024-11-21T23:05:51.588230Z node 3 :KQP_COMPUTE TRACE: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=. TraceId : 01jd8fjyp4ewj83qevy2qr4n4p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Do not drain channelId: 1, finished 2024-11-21T23:05:51.588535Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=. TraceId : 01jd8fjyp4ewj83qevy2qr4n4p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. All outputs have been finished. Consider finished 2024-11-21T23:05:51.588563Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. Tasks execution finished 2024-11-21T23:05:51.588589Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1572:2943], TxId: 281474976715664, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=. 
TraceId : 01jd8fjyp4ewj83qevy2qr4n4p. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2024-11-21T23:05:51.588658Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2024-11-21T23:05:51.588734Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:05:51.588859Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T23:05:51.589001Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2903] TxId: 281474976715664. Ctx: { TraceId: 01jd8fjyp4ewj83qevy2qr4n4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1572:2943], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 11780 DurationUs: 14000 Tasks { TaskId: 1 CpuTimeUs: 5719 FinishTimeMs: 1732230351587 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 2307 BuildCpuTimeUs: 3412 HostName: "ghrun-uc25mmfz34" NodeId: 3 StartTimeMs: 1732230351573 } MaxMemoryUsage: 1048576 } 2024-11-21T23:05:51.589033Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01jd8fjyp4ewj83qevy2qr4n4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1572:2943] 2024-11-21T23:05:51.589102Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2903] TxId: 281474976715664. Ctx: { TraceId: 01jd8fjyp4ewj83qevy2qr4n4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:05:51.589136Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1569:2903] TxId: 281474976715664. Ctx: { TraceId: 01jd8fjyp4ewj83qevy2qr4n4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T23:05:51.589412Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1569:2903] TxId: 281474976715664. Ctx: { TraceId: 01jd8fjyp4ewj83qevy2qr4n4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDQ0MWMwNTctZDIzZmFjY2ItYmNkNzZjYWQtYjUxY2ViYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.011780s ReadRows: 0 ReadBytes: 0 ru: 7 rate limiter was not found force flag: 1 2024-11-21T23:05:51.593510Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2024-11-21T23:05:51.594216Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] Handle TEvProposeTransaction 2024-11-21T23:05:51.594248Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] TxId# 0 ProcessProposeTransaction 2024-11-21T23:05:51.595245Z node 3 :TX_PROXY DEBUG: actor# [3:164:2150] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1574:2944] SnapshotReq marker# P0 2024-11-21T23:05:51.599451Z node 3 :TX_PROXY DEBUG: Actor# [3:1576:2944] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2024-11-21T23:05:51.600273Z node 3 :TX_PROXY DEBUG: Actor# [3:1576:2944] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2024-11-21T23:05:51.601015Z node 3 :TX_PROXY DEBUG: Actor# [3:1574:2944] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2024-11-21T23:06:05.415023Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:06:05.415470Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:05.415647Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:06:05.418666Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:06:05.418931Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:06:05.419770Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00286b/r3tmp/tmp92La0q/pdisk_1.dat 2024-11-21T23:06:06.166584Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:06.542025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:06:06.662792Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:06.662994Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:06.667349Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:06.667461Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:06.683587Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2024-11-21T23:06:06.684243Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:06.684666Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:07.156252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:06:08.569586Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1329:2798], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:08.569714Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1340:2803], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:08.570126Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:08.576069Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:09.224267Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1343:2806], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:06:10.212401Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fkk9q4nfsxhh3fe1dkpb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTk1NTEzMTMtNjI4NTk5ZjYtNzc3ZWQ1ZDUtOWVlNjk0Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:06:11.246684Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fkmy92hneg4m68pbtcb12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTRiMjA1MjEtYzllYzNmMDItYzIxYzFhMjAtMmRmYWE1Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:06:15.305911Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fkmy92hneg4m68pbtcb12, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=ZTRiMjA1MjEtYzllYzNmMDItYzIxYzFhMjAtMmRmYWE1Yjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:06:15.380580Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TChargeBTreeIndex::NoNodes >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2024-11-21T23:04:48.373007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872436276388288:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:48.458543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bdf/r3tmp/tmpgUOAHD/pdisk_1.dat 2024-11-21T23:04:50.186507Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:51.581491Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:51.867999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:51.868375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:51.953378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:52.530842Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.115506s TServer::EnableGrpc on GrpcPort 18005, node 1 2024-11-21T23:04:53.621202Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872436276388288:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:53.621256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:53.621290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:04:53.731470Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:53.731493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:04:53.731504Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:53.731607Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:04:54.159505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.169168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:54.169251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.186854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:04:54.187063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:04:54.187085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:04:54.190842Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:04:54.190879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:04:54.193233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.193508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:54.196853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230294242, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:54.196898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:54.197214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:04:54.198885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:54.199020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:54.199059Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:04:54.199133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:04:54.199161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:04:54.199196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:04:54.201455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:04:54.201505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:04:54.201520Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:04:54.201598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:04:54.324651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.324936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:54.324954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.325018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:04:54.325096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:04:54.325107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still 
in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:04:54.329521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:04:54.329652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:54.329875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:54.330827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:04:54.330852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:04:54.330861Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:04:54.330940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:28021 2024-11-21T23:04:55.874527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:55.874738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:55.874757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:55.876922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:04:55.877077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:04:55.882024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230295929, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:55.882061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230295929, at schemeshard: 72057594046644480 2024-11-21T23:04:55.882271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:04:55.882346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:04:55.882412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:04:55.889294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:55.889465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:55.891027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:04:55.891073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:04:55.891092Z node 1 :FLA ... , operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:06:00.979089Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:00.979108Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:06:00.979170Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:06:00.979238Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:06:00.979249Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:06:01.000327Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:06:01.000477Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:01.000724Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:01.010087Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:06:01.010140Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:06:01.010161Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:06:01.010247Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2024-11-21T23:06:01.389072Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439872730006395175:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:01.389489Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:19628 2024-11-21T23:06:02.636320Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:06:02.636800Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:02.637394Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:06:02.647743Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:06:02.648235Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:02.667193Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230362709, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:06:02.667368Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230362709, at schemeshard: 72057594046644480 2024-11-21T23:06:02.668431Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:06:02.668521Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:06:02.668554Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:06:02.668889Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:06:02.670775Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:02.670970Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:02.672696Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:06:02.672740Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:06:02.672760Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:06:02.672831Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T23:06:02.741767Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: 
/Root/stream_TestGetRecords1MBMessagesOneByOneBySeqNo, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:06:02.742175Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:02.747365Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestGetRecords1MBMessagesOneByOneBySeqNo 2024-11-21T23:06:02.747585Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:02.747809Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:02.747874Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T23:06:02.749237Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:06:02.749294Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:06:02.749315Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:06:02.750780Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:06:02.750817Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:06:02.750833Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T23:06:02.751838Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T23:06:02.767336Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:02.767666Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:02.767706Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 1 -> 3 2024-11-21T23:06:02.778601Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:06:02.809416Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:02.817705Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:02.817746Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 3 -> 128 2024-11-21T23:06:02.820535Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976710660:0, at 
schemeshard: 72057594046644480 2024-11-21T23:06:02.831985Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230362877, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:06:02.832081Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976710660:0 HandleReply TEvOperationPlan, step: 1732230362877, at tablet: 72057594046644480 2024-11-21T23:06:02.832375Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 128 -> 240 2024-11-21T23:06:02.835294Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:02.835860Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:02.835963Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:0 ProgressState 2024-11-21T23:06:02.836130Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:0 progress is 1/1 2024-11-21T23:06:02.836188Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2024-11-21T23:06:02.836409Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710660, publications: 2, subscribers: 1 2024-11-21T23:06:02.839834Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:06:02.839899Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:06:02.839923Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T23:06:02.840211Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:06:02.840233Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:06:02.840245Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:06:02.840297Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710660, subscribers: 1 2024-11-21T23:06:11.991826Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:06:11.991856Z node 10 :IMPORT WARN: Table profiles were not loaded >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::WideKey >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random >> TS3FIFOCache::Random [GOOD] >> 
TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> DBase::WideKey [GOOD] >> DBase::Outer >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> TIterator::Single >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> TPartBtreeIndexIteration::NoNodes_Groups >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> ResourcePoolsDdl::TestDefaultPoolRestrictions >> KqpWorkloadServiceDistributed::TestDistributedQueue |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |79.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> TScreen::Sequential [GOOD] >> TScreen::Random >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> KqpWorkloadServiceActors::TestPoolFetcher |79.4%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Bloom::Conf [GOOD] >> Bloom::Hashes >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> TFlatTableExecutorResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutorResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutorSliceOverlapScan::TestSliceOverlapScan >> Bloom::Hashes [GOOD] >> Bloom::Rater |79.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> Bloom::Rater [GOOD] >> Bloom::Dipping >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag >> TFlatTableBackgroundCompactions::TestRunBackgroundSnapshot >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TFlatTableBackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableCold::ColdBorrowScan >> TFlatTableCold::ColdBorrowScan [GOOD] >> TFlatTableCompactionScan::TestCompactionScan >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups >> TFlatTableCompactionScan::TestCompactionScan [GOOD] >> TFlatTableDatetime::TestDate >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_Default >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |79.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::FewNodes >> TPersQueueTest::DirectReadNotCached [GOOD] >> TPersQueueTest::DirectReadBadCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2024-11-21T23:04:52.450251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:04:52.524822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:04:52.528852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:52.529494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:04:52.536339Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:04:52.536407Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002850/r3tmp/tmpOmpeZC/pdisk_1.dat 2024-11-21T23:04:53.696436Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:53.945535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.061709Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:04:54.064024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:54.064162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:54.065266Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:04:54.065775Z node 2 :TX_PROXY DEBUG: actor# [2:193:2108] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:04:54.070605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:54.070728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:54.071332Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2024-11-21T23:04:54.084426Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:04:54.084863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:54.085179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:54.552476Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Handle TEvProposeTransaction 2024-11-21T23:04:54.552550Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:04:54.552714Z node 1 :TX_PROXY DEBUG: actor# [1:164:2150] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1161:2705] 2024-11-21T23:04:55.088433Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { 
Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:04:55.091770Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:04:55.092723Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:04:55.094639Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:04:55.096143Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:04:55.097348Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2024-11-21T23:04:55.161031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:55.177518Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:04:55.236630Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:04:55.237152Z node 1 :TX_PROXY DEBUG: Actor# [1:1161:2705] txid# 281474976715657 SEND to# [1:1071:2648] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:04:55.854137Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1240:2764] 2024-11-21T23:04:55.854506Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:55.948587Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1242:2765] 2024-11-21T23:04:55.948826Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:55.998240Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:56.006625Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:04:56.008258Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T23:04:56.008343Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T23:04:56.008413Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T23:04:56.008801Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:04:56.135170Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T23:04:56.135376Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:04:56.135495Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:1333:2812] 2024-11-21T23:04:56.135531Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 
72075186224037889 2024-11-21T23:04:56.135562Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T23:04:56.135596Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:04:56.136382Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T23:04:56.136469Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T23:04:56.136625Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:04:56.136660Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:04:56.136704Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T23:04:56.136739Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:04:56.137107Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:1263:2777], serverId# [1:1301:2795], sessionId# [0:0:0] 2024-11-21T23:04:56.137302Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T23:04:56.137506Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:04:56.137584Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T23:04:56.140086Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1244:2766] 2024-11-21T23:04:56.140309Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:04:56.160702Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:04:56.163449Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:04:56.171401Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2024-11-21T23:04:56.171484Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2024-11-21T23:04:56.171531Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2024-11-21T23:04:56.171805Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:04:56.171866Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2024-11-21T23:04:56.171949Z node 1 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:04:56.172041Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [1:1352:2823] 2024-11-21T23:04:56.172071Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2024-11-21T23:04:56.172096Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2024-11-21T23:04:56.172122Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2024-11-21T23:04:56.173238Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2024-11-21T23:04:56.173310Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2024-11-21T23:04:56.173947Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2024-11-21T23:04:56.173986Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:04:56.174015Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2024-11-21T23:04:56.174050Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2024-11-21T23:04:56.174233Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [1:1268:2782], serverId# [1:1329:2809], sessionId# [0:0:0] 2024-11-21T23:04:56.175181Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2024-11-21T23:04:56.175344Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:04:56.175410Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037894 2024-11-21T23:04:56.215129Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 720575 ... 6715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1898:3123], 2024-11-21T23:06:35.908411Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1895:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [5:1569:2936], seqNo: 1, nRows: 1 2024-11-21T23:06:35.908508Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:06:35.908539Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2024-11-21T23:06:35.908568Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2024-11-21T23:06:35.908600Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2024-11-21T23:06:35.908651Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
ProcessOutputsState.Inflight: 0 2024-11-21T23:06:35.908696Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2024-11-21T23:06:35.908729Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T23:06:35.908776Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T23:06:35.908952Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1895:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1898:3123], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 457 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 174 FinishTimeMs: 1732230395907 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 54 BuildCpuTimeUs: 120 HostName: "ghrun-uc25mmfz34" NodeId: 5 StartTimeMs: 1732230395906 } MaxMemoryUsage: 1048576 } 2024-11-21T23:06:35.908993Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1895:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1898:3123], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2024-11-21T23:06:35.909308Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1899:3123] 2024-11-21T23:06:35.909380Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2024-11-21T23:06:35.909436Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2024-11-21T23:06:35.909463Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2024-11-21T23:06:35.909541Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:06:35.909578Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. 
SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2024-11-21T23:06:35.909611Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2024-11-21T23:06:35.909637Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2024-11-21T23:06:35.909660Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2024-11-21T23:06:35.909684Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2024-11-21T23:06:35.909721Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T23:06:35.909750Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished 2024-11-21T23:06:35.909789Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1898:3123], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jd8fm1gsexk9t26ys3qpbt74. SessionId : ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T23:06:35.909911Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2024-11-21T23:06:35.910003Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:06:35.910147Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T23:06:35.910350Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1895:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1898:3123], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1970 DurationUs: 3000 Tasks { TaskId: 1 CpuTimeUs: 176 FinishTimeMs: 1732230395909 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 56 BuildCpuTimeUs: 120 HostName: "ghrun-uc25mmfz34" NodeId: 5 StartTimeMs: 1732230395906 } MaxMemoryUsage: 1048576 } 2024-11-21T23:06:35.910612Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1898:3123] 2024-11-21T23:06:35.910713Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1895:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:06:35.910754Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1895:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T23:06:35.910794Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1895:2936] TxId: 281474976715667. Ctx: { TraceId: 01jd8fm1gsexk9t26ys3qpbt74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001970s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T23:06:35.911029Z node 5 :KQP_SLOW_LOG WARN: TraceId: "01jd8fm1gsexk9t26ys3qpbt74", SessionId: ydb://session/3?node_id=5&id=MTRiZTczZGQtNWY1OTE5NzUtZDE2NTZlYzQtNTA5MjYyYjI=, Slow query, duration: 12.781365s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT sum(value) FROM `/Root/table-1`;", parameters: 0b ... response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 5486 |79.5%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |79.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> Bloom::Dipping [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True >> Bloom::Basics [GOOD] >> Bloom::Stairs >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TFlatTableExecutorSliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutorStickyPages::TestNonSticky_FlatIndex >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |79.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> TFlatTableExecutorStickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutorStickyPages::TestNonSticky_BTreeIndex >> TPersQueueTest::Init [GOOD] >> TPersQueueTest::NoDecompressionMemoryLeaks >> TFlatTableExecutorStickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutorStickyPages::TestSticky |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> TFlatTableExecutorStickyPages::TestSticky [GOOD] >> TFlatTableExecutorStickyPages::TestNonStickyGroup_FlatIndex >> TFlatTableExecutorStickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutorStickyPages::TestNonStickyGroup_BTreeIndex >> TFlatTableExecutorStickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutorStickyPages::TestStickyMain >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> TFlatTableExecutorStickyPages::TestStickyMain [GOOD] >> TFlatTableExecutorStickyPages::TestStickyAlt_FlatIndex >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::ThreeLeveledLRU >> TFlatTableExecutorStickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutorStickyPages::TestStickyAlt_BTreeIndex >> TFlatTableExecutorStickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutorStickyPages::TestStickyAll >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> TFlatTableExecutorStickyPages::TestStickyAll [GOOD] >> TFlatTableExecutorStickyPages::TestAlterAddFamilySticky >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> TFlatTableExecutorStickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutorStickyPages::TestAlterAddFamilyPartiallySticky >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_False >> TFlatTableExecutorStickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutorTxLimit::TestExecutorTxLimit [GOOD] >> 
TFlatTableExecutorVersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TFlatTableExecutorVersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutorVersionedRows::TestVersionedRows >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> TFlatTableExecutorVersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutorVersionedRows::TestVersionedRowsSmallBlobs >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> TSharedPageCache::ThreeLeveledLRU [GOOD] >> TSharedPageCache::S3FIFO >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TGroupMapperTest::Block42_1disk [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::ClockPro >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |79.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |79.5%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |79.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> test.py::test[solomon-InvalidProject-] [GOOD] >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> test.py::test[solomon-LabelColumns-default.txt] >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> KqpWorkloadService::TestQueueSizeSimple >> TSharedPageCache::ClockPro [GOOD] >> TSharedPageCache::ReplacementPolicySwitch >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> TSharedPageCache::ReplacementPolicySwitch [GOOD] >> TSwitchableCache::Touch [GOOD] >> TSwitchableCache::Erase [GOOD] >> TSwitchableCache::EvictNext [GOOD] >> TSwitchableCache::UpdateLimit [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_All [GOOD] >> TSwitchableCache::Switch_Touch_RotatePages_Parts [GOOD] >> TSwitchableCache::Switch_RotatePages_Force [GOOD] >> TSwitchableCache::Switch_RotatePages_Evicts [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> TSwitchableCache::Switch_Touch [GOOD] >> TSwitchableCache::Switch_Erase [GOOD] >> TSwitchableCache::Switch_EvictNext [GOOD] >> TSwitchableCache::Switch_UpdateLimit [GOOD] >> TVersions::WreckHead >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |79.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> KqpStreamLookup::ReadTableDuringSplit >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed ------- [TM] {asan, default-linux-x86_64, release} 
ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2024-11-21T23:04:45.088736Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872427142709598:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:45.088770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bec/r3tmp/tmpkNZEn7/pdisk_1.dat 2024-11-21T23:04:47.033186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:48.058631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:48.890089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:48.890196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:48.904825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11888, node 1 2024-11-21T23:04:50.201022Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872427142709598:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:50.201265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:04:50.202149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:04:50.202158Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:04:50.202162Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:04:50.202231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:04:50.263213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T23:04:50.263746Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7208 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:04:53.014963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.027460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:53.027532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.031186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:04:53.031358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:04:53.031383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:04:53.033150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:53.033556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:04:53.033574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:04:53.034922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.037903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230293080, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:53.037936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:53.038223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:04:53.039763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:53.039899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:53.039938Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:04:53.040051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:04:53.040078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:04:53.040111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:04:53.044421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:04:53.044489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:04:53.044508Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:04:53.044606Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:04:53.465157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.465373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:53.465573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.465658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:04:53.465732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:04:53.465746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:04:53.467999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:04:53.468150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:53.468437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:53.469300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:04:53.469331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:04:53.469345Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:04:53.469457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:7208 2024-11-21T23:04:54.024957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.025296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:54.025350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:04:54.035418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:04:54.035616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:04:54.043771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:04:54.051231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230294088, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:54.051275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230294088, at schemeshard: 72057594046644480 2024-11-21T23:04:54.051526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:04:54.051605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:04:54.051683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T23:04:54.054756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:54.054932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:54.056386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:04:54.056429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard ... 1091Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:06:36.471128Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:06:36.471248Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:06:36.580206Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:06:36.580540Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:36.580566Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:06:36.580653Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:06:36.580744Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:06:36.580761Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:06:36.591451Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusSuccess, operation: MODIFY ACL, path: Root, add access: +R:user@builtin, add access: +W:user@builtin 2024-11-21T23:06:36.591667Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:36.592049Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:36.593449Z 
node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:06:36.593499Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:06:36.593522Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:06:36.593626Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 TClient is connected to server localhost:4648 2024-11-21T23:06:36.992054Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:06:36.992313Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:36.992351Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:06:36.996639Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:06:36.996918Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:37.003217Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:06:37.012413Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230397051, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:06:37.012465Z node 10 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710659:0, stepId:1732230397051, at schemeshard: 72057594046644480 2024-11-21T23:06:37.012882Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:06:37.013001Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:06:37.013049Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2024-11-21T23:06:37.025480Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:37.025761Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:37.026636Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:06:37.026690Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:06:37.026714Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:06:37.026827Z 
node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T23:06:37.096354Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/stream_TestGetRecordsWithBigSeqno, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:06:37.096959Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:06:37.104544Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: user@builtin, status: StatusAccepted, operation: CREATE PERSISTENT QUEUE, path: /Root/stream_TestGetRecordsWithBigSeqno 2024-11-21T23:06:37.104876Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:37.105211Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:37.105299Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 ProgressState, operation type: TxCreatePQGroup, at tablet72057594046644480 2024-11-21T23:06:37.108085Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:06:37.108167Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:06:37.108200Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:06:37.108548Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:06:37.108575Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:06:37.108590Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T23:06:37.110540Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T23:06:37.127866Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:37.128342Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710660:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:06:37.128403Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 1 -> 3 2024-11-21T23:06:37.136219Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:06:40.792153Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:41.006573Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TConfigureParts operationId#281474976710660:0 HandleReply TEvUpdateConfigResponse at tablet72057594046644480 2024-11-21T23:06:41.006625Z node 
10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 3 -> 128 2024-11-21T23:06:41.023483Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:06:41.038368Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230401076, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:06:41.038460Z node 10 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976710660:0 HandleReply TEvOperationPlan, step: 1732230401076, at tablet: 72057594046644480 2024-11-21T23:06:41.038724Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 128 -> 240 2024-11-21T23:06:41.042346Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:06:41.042846Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:06:41.042931Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:0 ProgressState 2024-11-21T23:06:41.043080Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:0 progress is 1/1 2024-11-21T23:06:41.043135Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2024-11-21T23:06:41.043320Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710660, publications: 2, subscribers: 1 2024-11-21T23:06:41.047400Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:06:41.047465Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:06:41.047503Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 8 2024-11-21T23:06:41.047772Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:06:41.047801Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:06:41.047816Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:06:41.047861Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710660, subscribers: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:229:2060] recipient: [1:212:2140] 2024-11-21T23:05:38.276038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:05:38.286990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:38.287062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:05:38.287103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:05:38.287144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:05:38.287167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:05:38.287213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:38.287504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:05:38.849014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:05:38.849416Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:39.010068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:05:39.010505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:05:39.011576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:05:39.135933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:05:39.136208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:05:39.136733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:39.136935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:05:39.139638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:39.140864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:39.140925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:39.141079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:05:39.141135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:39.141171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:05:39.141295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:05:39.183032Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:226:2148] sender: [1:339:2060] recipient: [1:17:2064] 2024-11-21T23:05:40.951372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:05:40.951619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:40.951839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:05:40.952102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:05:40.952159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:40.992446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:40.992630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:05:40.992912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:40.992991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:05:40.993031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:05:40.993068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:05:40.996965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:40.997086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:05:40.997127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:05:40.999418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:40.999487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:40.999539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:40.999583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:05:41.015616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:05:41.026704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:05:41.028047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:05:41.035038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:41.035197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:05:41.035250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:41.035514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:05:41.035583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:41.035758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:05:41.035840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:05:41.039714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:41.039791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:41.040009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:41.040056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:306:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:05:41.040540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:41.040606Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:05:41.040738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:05:41.040789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:41.040850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:05:41.040897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:41.040938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:05:41.040971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:05:41.041046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:05:41.041097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:05:41.041132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:05:41.044381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 
3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:41.045189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:41.045456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:05:41.045683Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:05:41.046351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:05:41.048112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ablet# 72057594046678944 2024-11-21T23:06:45.305749Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:06:45.305786Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:45.305884Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T23:06:45.305928Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:45.305992Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:2 2024-11-21T23:06:45.306121Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:950:2728] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T23:06:45.306438Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:229:2151], Recipient [7:950:2728]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2024-11-21T23:06:45.306490Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T23:06:45.306532Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2024-11-21T23:06:45.306600Z node 7 :TX_DATASHARD DEBUG: 72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2024-11-21T23:06:45.306869Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T23:06:45.306916Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T23:06:45.306970Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2024-11-21T23:06:45.307015Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2024-11-21T23:06:45.307139Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:45.307179Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2024-11-21T23:06:45.307215Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2024-11-21T23:06:45.307268Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T23:06:45.309094Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:06:45.309132Z node 
7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:45.309161Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2024-11-21T23:06:45.309229Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:959:2735] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T23:06:45.309353Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:229:2151], Recipient [7:959:2735]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2024-11-21T23:06:45.309405Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T23:06:45.309436Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2024-11-21T23:06:45.309485Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2024-11-21T23:06:45.309746Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T23:06:45.309787Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T23:06:45.309837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:06:45.309925Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T23:06:45.310002Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:45.310028Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2024-11-21T23:06:45.310052Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T23:06:45.310088Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2024-11-21T23:06:45.310149Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:571:2402] message: TxId: 104 2024-11-21T23:06:45.310200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T23:06:45.310268Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T23:06:45.310307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T23:06:45.310448Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2024-11-21T23:06:45.310492Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2024-11-21T23:06:45.310512Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2024-11-21T23:06:45.310536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2024-11-21T23:06:45.310554Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2024-11-21T23:06:45.310572Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2024-11-21T23:06:45.310605Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2024-11-21T23:06:45.312991Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects 
ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:45.313111Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:06:45.313203Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:571:2402] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2024-11-21T23:06:45.313409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:06:45.313471Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1012:2776] 2024-11-21T23:06:45.313772Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1014:2778], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:45.313811Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:45.313834Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T23:06:45.314833Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:545:2100], Recipient [7:229:2151] 2024-11-21T23:06:45.314894Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:06:45.317181Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:06:45.317701Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2024-11-21T23:06:45.317760Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2024-11-21T23:06:45.318018Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:06:45.320672Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:06:45.320955Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2024-11-21T23:06:45.321022Z node 7 :FLAT_TX_SCHEMESHARD 
TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2024-11-21T23:06:45.321514Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2024-11-21T23:06:45.321560Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2024-11-21T23:06:45.321970Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1078:2842], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:06:45.322021Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:06:45.322059Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:06:45.322202Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:571:2402], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2024-11-21T23:06:45.322234Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T23:06:45.322304Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2024-11-21T23:06:45.322423Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:06:45.322484Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1076:2840] 2024-11-21T23:06:45.322730Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1078:2842], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:45.322774Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:06:45.322810Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |79.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> Memtable::Wreck [GOOD] >> Memtable::Erased >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> 
NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool >> TBlobStorageProxyTest::TestProxySimpleDiscover >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> TBlobStorageProxyTest::TestVPutVGet >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TClockProCache::Touch [GOOD] >> TClockProCache::Lifecycle [GOOD] >> TClockProCache::EvictNext [GOOD] >> TClockProCache::UpdateLimit [GOOD] >> TClockProCache::Erase [GOOD] >> TClockProCache::Random >> TClockProCache::Random [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NFwd_TFlatIndexCache::End [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 
ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + 
BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} 
Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 
0} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > (2) | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > (4) | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > (6) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > (8) | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > (10) | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > (12) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > (14) | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > (16) | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > (18) | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > (20) | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > (22) | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > (24) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > (26) | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > (28) | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > (30) | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > (32) | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > (34) | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > (36) | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > (38) | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 
Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, 
[22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 
3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> TFlatTableExecutorBTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutorCachePressure::TestNotEnoughLocalCache >> TFlatTableExecutorCachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutorColumnGroups::TestManyRows |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |79.5%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2024-11-21T23:06:47.057566Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002afc/r3tmp/tmpTlTzM9//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T23:06:47.110365Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002afc/r3tmp/tmpTlTzM9//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T23:06:47.154623Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:06:47.154872Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2024-11-21T23:06:42.328926Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002af4/r3tmp/tmpQJTNjx//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T23:06:42.423604Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:06:44.105804Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002af4/r3tmp/tmpQJTNjx//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 2 2024-11-21T23:06:44.115591Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:06:46.474748Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002af4/r3tmp/tmpQJTNjx//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 3 2024-11-21T23:06:46.528066Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:06:48.449746Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002af4/r3tmp/tmpQJTNjx//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 4 2024-11-21T23:06:48.603627Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:3:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in 
StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:06:50.489408Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002af4/r3tmp/tmpQJTNjx//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 5 2024-11-21T23:06:50.582978Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:4:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} 
reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:06:52.916532Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002af4/r3tmp/tmpQJTNjx//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 6 2024-11-21T23:06:52.952340Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:5:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# 
[18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels >> Cache::Test5 >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> TFlatTableExecutorColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutorCompressedSelectRows::TestCompressedSelectRows >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels >> TFlatTableExecutorCompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutorFollower::BasicFollowerRead [GOOD] >> TFlatTableExecutorFollower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> TFlatTableExecutorIndexLoading::CalculateReadSize_FlatIndex >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History >> EntityId::Order >> TFlatTableExecutorIndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutorIndexLoading::CalculateReadSize_BTreeIndex >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2024-11-21T23:05:50.105762Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:05:50.194439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:05:50.221767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:05:50.221845Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:05:50.222097Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:05:50.238599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:05:50.239215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:50.239438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:50.239561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:50.239674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:50.239769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:50.239865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:50.240004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:50.240148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:50.240265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:50.240355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:50.240459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:50.373177Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:50.420681Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:05:50.421997Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:05:50.422617Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:05:50.424625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:50.427080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:05:50.427775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:05:50.429283Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:05:50.429356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:05:50.429405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:05:50.429440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:05:50.429465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:05:50.429617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:50.429675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:05:50.429709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:05:50.429776Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:05:50.429857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:05:50.429902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:05:50.429934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:05:50.429960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:05:50.430013Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:05:50.430041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:05:50.430062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:05:50.430093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:05:50.430122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:05:50.430145Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:05:50.430284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2024-11-21T23:05:50.430354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2024-11-21T23:05:50.430471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=69; 2024-11-21T23:05:50.430538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2024-11-21T23:05:50.430720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:05:50.430763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:05:50.430788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:05:50.430955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:05:50.430985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:05:50.431008Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:05:50.431120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:05:50.431158Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:05:50.431187Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:05:50.431324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:05:50.431353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:05:50.431376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:05:50.431491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... common_data.cpp:29;EXECUTE:finishLoadingTime=5043; 2024-11-21T23:06:55.427210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11485; 2024-11-21T23:06:55.428141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=852; 2024-11-21T23:06:55.429982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=817; 2024-11-21T23:06:55.430060Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1829; 2024-11-21T23:06:55.430217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=100; 2024-11-21T23:06:55.430349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:06:55.430474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=203; 2024-11-21T23:06:55.430581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=58; 2024-11-21T23:06:55.430668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=49; 2024-11-21T23:06:55.432316Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1596; 2024-11-21T23:06:55.433962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1579; 2024-11-21T23:06:55.434257Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=217; 2024-11-21T23:06:55.434628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=316; 2024-11-21T23:06:55.434688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2024-11-21T23:06:55.434743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2024-11-21T23:06:55.434786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2024-11-21T23:06:55.434874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=49; 2024-11-21T23:06:55.434926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2024-11-21T23:06:55.435028Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=65; 2024-11-21T23:06:55.435085Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2024-11-21T23:06:55.435165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2024-11-21T23:06:55.435207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=27193; 2024-11-21T23:06:55.435377Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7100088;raw_bytes=7088498; inactive portions=33;blobs=66;rows=1126788;bytes=85356024;raw_bytes=106323502; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:06:55.435516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:06:55.435580Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:06:55.435663Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T23:06:55.435766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:06:55.435814Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:06:55.435897Z node 1 :TX_COLUMNSHARD 
DEBUG: fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:06:55.435941Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:06:55.436013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:06:55.436093Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=8; 2024-11-21T23:06:55.436153Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T23:06:55.436207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:06:55.436255Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:06:55.436298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:06:55.436390Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:06:55.436476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:06:55.437593Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:06:55.437704Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1981:3968];tablet_id=9437184;parent=[1:1948:3942];fline=manager.h:99;event=ask_data;request=request_id=106;1={portions_count=35};; 2024-11-21T23:06:55.439034Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1981:3968];tablet_id=9437184;parent=[1:1948:3942];fline=manager.h:99;event=ask_data;request=request_id=108;1={portions_count=2};; 2024-11-21T23:06:55.439221Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:06:55.440315Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:06:55.440355Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T23:06:55.440382Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:06:55.440463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:06:55.440555Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:06:55.440625Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=8; 2024-11-21T23:06:55.440689Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T23:06:55.440730Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:06:55.440783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:06:55.440832Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:06:55.440878Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:06:55.441006Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:06:55.441541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=35;path_id=1; 2024-11-21T23:06:55.443845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=35;path_id=1; 2024-11-21T23:06:55.455415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:06:55.455534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1948:3942];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] >> 
BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MinId [GOOD] >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TFlatTableExecutorIndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutorIndexLoading::PrechargeAndSeek_FlatIndex >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::RandomOps >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2024-11-21T23:06:39.099833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:06:39.114084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:39.114314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bcd/r3tmp/tmpER3aoL/pdisk_1.dat 2024-11-21T23:06:39.774895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:06:39.823443Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:39.881599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:39.881741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:39.895824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:40.043036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:06:40.773430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:750:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:40.773564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:760:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:40.773952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:40.779792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:41.016777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:764:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:06:54.747633Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fmjr3enmx7vp2mwpq181q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNhOTBiMWQtMzgwYjYzN2ItZjIzOWU2NjgtMjQ4YTZmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:06:55.323641Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fmjr3enmx7vp2mwpq181q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNhOTBiMWQtMzgwYjYzN2ItZjIzOWU2NjgtMjQ4YTZmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:06:55.418086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fmjr3enmx7vp2mwpq181q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNhOTBiMWQtMzgwYjYzN2ItZjIzOWU2NjgtMjQ4YTZmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:06:55.679694Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jd8fmjr3enmx7vp2mwpq181q", SessionId: ydb://session/3?node_id=1&id=NjNhOTBiMWQtMzgwYjYzN2ItZjIzOWU2NjgtMjQ4YTZmYmQ=, Slow query, duration: 14.908391s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), (164, 
164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), (186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), (205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), (401, 
401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), (423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), (442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), (638, 
638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), (660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), (679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), (875, 
875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), (897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), (916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b 2024-11-21T23:06:56.334279Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fn1c83c8d9bjxehsavk7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU1NjBhM2ItMjk0ZGExMjUtMjFjZDdjYi01M2NjZjgzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2024-11-21T23:06:54.776060Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:397} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002af9/r3tmp/tmpKEwO1s//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1 2024-11-21T23:06:54.854362Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |79.6%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> TFlatTableExecutorVersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutorVersionedRows::TestVersionedRowsLargeBlobs >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit >> GenericFederatedQuery::YdbFilterPushdown |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2024-11-21T23:05:48.535534Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:05:48.803256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:05:48.880361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:05:48.880472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:05:48.880758Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:05:48.906424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:05:48.906701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:48.906993Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:48.907131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:48.907265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:48.907374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:48.907484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:48.907595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:48.907726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:48.907854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:48.907995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:48.908128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:48.988588Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:49.002765Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:05:49.003130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:05:49.003211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:05:49.003426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:49.003617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:05:49.003700Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:05:49.003757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:05:49.003858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:05:49.003926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:05:49.003991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:05:49.004028Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:05:49.004227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:49.004314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:05:49.004358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:05:49.004390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:05:49.004487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:05:49.004562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:05:49.004609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:05:49.007443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:05:49.007882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:05:49.007958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:05:49.008030Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:05:49.008094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2024-11-21T23:05:49.008152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:05:49.008189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:05:49.008436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2024-11-21T23:05:49.008561Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2024-11-21T23:05:49.008681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=60; 2024-11-21T23:05:49.008774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2024-11-21T23:05:49.008952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:05:49.009013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:05:49.009053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:05:49.009267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:05:49.009318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:05:49.009365Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:05:49.009530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:05:49.009575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:05:49.009612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:05:49.009816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:05:49.009857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:05:49.009892Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:05:49.010029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... e=common_data.cpp:29;EXECUTE:finishLoadingTime=6655; 2024-11-21T23:06:58.349011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=15497; 2024-11-21T23:06:58.349946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=864; 2024-11-21T23:06:58.351828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=869; 2024-11-21T23:06:58.351903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1890; 2024-11-21T23:06:58.352049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=82; 2024-11-21T23:06:58.352219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:06:58.352273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=175; 2024-11-21T23:06:58.352388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=65; 2024-11-21T23:06:58.352494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=61; 2024-11-21T23:06:58.354164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1609; 2024-11-21T23:06:58.356177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1938; 2024-11-21T23:06:58.356543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=264; 2024-11-21T23:06:58.356877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=249; 2024-11-21T23:06:58.356942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2024-11-21T23:06:58.356991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2024-11-21T23:06:58.357034Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2024-11-21T23:06:58.357141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=63; 2024-11-21T23:06:58.357194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2024-11-21T23:06:58.357293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=64; 2024-11-21T23:06:58.357350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2024-11-21T23:06:58.357418Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2024-11-21T23:06:58.357455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=32004; 2024-11-21T23:06:58.357608Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=3;blobs=6;rows=75200;bytes=7465172;raw_bytes=7453400; inactive portions=42;blobs=84;rows=1026094;bytes=79798400;raw_bytes=101677198; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:06:58.357707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2014:4010];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:06:58.357752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:06:58.357853Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T23:06:58.357945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2014:4010];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:06:58.357987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:06:58.358053Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:06:58.358090Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:06:58.358147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:06:58.358222Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=8; 2024-11-21T23:06:58.358301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T23:06:58.358339Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:06:58.358384Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:06:58.358864Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:06:58.358972Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:06:58.359059Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:06:58.360114Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:06:58.360232Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2047:4036];tablet_id=9437184;parent=[1:2014:4010];fline=manager.h:99;event=ask_data;request=request_id=117;1={portions_count=45};; 2024-11-21T23:06:58.361031Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2047:4036];tablet_id=9437184;parent=[1:2014:4010];fline=manager.h:99;event=ask_data;request=request_id=119;1={portions_count=3};; 2024-11-21T23:06:58.362213Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:06:58.362435Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:06:58.362471Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T23:06:58.362503Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:06:58.362545Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:06:58.362622Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:06:58.362684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=8; 2024-11-21T23:06:58.362740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T23:06:58.362783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:06:58.362842Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:06:58.362885Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:06:58.362928Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:06:58.363007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:06:58.363697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=45;path_id=1; 2024-11-21T23:06:58.366353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=45;path_id=1; 2024-11-21T23:06:58.370141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:06:58.370209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:2014:4010];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> 
BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> KqpWorkloadServiceTables::TestLeaseUpdates >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TIterator::SerialReverse [GOOD] >> TIterator::GetKeyWithEraseCache >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestDropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:230:2060] recipient: [1:212:2140] 2024-11-21T23:05:38.445141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:05:38.454522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:38.454938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:05:38.455989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:05:38.456680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:05:38.457277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:05:38.457734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:38.480803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2024-11-21T23:05:41.011666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:05:41.011728Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:41.038044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:05:41.038625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:05:41.038920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:05:41.089799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:05:41.095513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:05:41.100875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:41.110591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:05:41.126463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:41.136631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:41.137251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:41.150877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:05:41.151785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:41.152147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:05:41.154811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:05:41.272550Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:342:2060] recipient: [1:17:2064] 2024-11-21T23:05:42.012388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:05:42.013855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:42.015373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:05:42.020717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:05:42.021893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:42.047698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:42.049178Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:05:42.059523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:42.060860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:05:42.061162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:05:42.061848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:05:42.094045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:42.094413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:05:42.094706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:05:42.114169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:42.118631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:42.119153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:42.119718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:05:42.151066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:05:42.177137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:05:42.178729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:05:42.187123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:42.187269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 237 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:05:42.187311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:42.187546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:05:42.187588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:42.187729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 
1] was 1 2024-11-21T23:05:42.187812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:05:42.190108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:42.190149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:42.190303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:42.190334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:309:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:05:42.194761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:42.194820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:05:42.194906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:05:42.194935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:42.194968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:05:42.195003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:42.195037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:05:42.195064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:05:42.195137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:05:42.195175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:05:42.195202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:05:42.196881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:42.196972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:42.197006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:05:42.197039Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:05:42.197093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:05:42.198156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2024-11-21T23:07:00.391885Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2024-11-21T23:07:00.391913Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2024-11-21T23:07:00.391943Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T23:07:00.392016Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2024-11-21T23:07:00.392051Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:07:00.393747Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:07:00.394962Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T23:07:00.394998Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:07:00.395071Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T23:07:00.395092Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:07:00.395352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T23:07:00.395396Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T23:07:00.395795Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:668:2499], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:07:00.395847Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:07:00.395883Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:07:00.396020Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2024-11-21T23:07:00.396054Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T23:07:00.396121Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T23:07:00.396236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T23:07:00.396278Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:666:2497] 2024-11-21T23:07:00.396465Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:668:2499], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:07:00.396498Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:07:00.396532Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 
72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2024-11-21T23:07:00.396949Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:542:2100], Recipient [7:229:2151] 2024-11-21T23:07:00.396998Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:07:00.399260Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 542 RawX2: 34359740468 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:07:00.399599Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T23:07:00.399725Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2024-11-21T23:07:00.399943Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:07:00.402213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:07:00.402457Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2024-11-21T23:07:00.402534Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2024-11-21T23:07:00.402919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T23:07:00.402967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T23:07:00.403350Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:674:2505], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:07:00.403396Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:07:00.403431Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:07:00.403552Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2024-11-21T23:07:00.403581Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T23:07:00.403646Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 
2024-11-21T23:07:00.403752Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T23:07:00.403791Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:672:2503] 2024-11-21T23:07:00.403959Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:674:2505], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:07:00.403991Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:07:00.404024Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2024-11-21T23:07:00.404446Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:542:2100], Recipient [7:229:2151] 2024-11-21T23:07:00.404496Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:07:00.406814Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 542 RawX2: 34359740468 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:07:00.407124Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T23:07:00.407196Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2024-11-21T23:07:00.407421Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:07:00.409757Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:07:00.410000Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2024-11-21T23:07:00.410066Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2024-11-21T23:07:00.410527Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2024-11-21T23:07:00.410570Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2024-11-21T23:07:00.410957Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:680:2511], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:07:00.411018Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:07:00.411059Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:07:00.411198Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:568:2399], Recipient [7:229:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2024-11-21T23:07:00.411230Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T23:07:00.411295Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2024-11-21T23:07:00.411408Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T23:07:00.411457Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:678:2509] 2024-11-21T23:07:00.411622Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:680:2511], Recipient [7:229:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:07:00.411657Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:07:00.411695Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > 0, a, false, 0 | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > 1, b, true, 10 | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > 2, c, false, 20 | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > 3, d, true, 30 | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > 4, e, false, 40 | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > 5, f, true, 50 | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > 6, g, false, 60 | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > 7, h, true, 70 | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 
ErasedRowCount: 306 | > 8, i, false, 80 | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > 9, j, true, 90 | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > 0, a, false, 0 | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > 1, b, true, 10 | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > 2, c, false, 20 | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > 3, d, true, 30 | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > 4, e, false, 40 | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > 5, f, true, 50 | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > 6, g, false, 60 | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > 7, h, true, 70 | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > 8, i, false, 80 | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > 9, j, true, 90 | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > 10, k, false, 100 | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > 11, l, true, 110 | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > 12, m, false, 120 | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > 13, n, true, 130 | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > 14, o, false, 140 | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > 15, p, true, 150 | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > 16, q, false, 160 | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > 17, r, true, 170 | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > 18, s, false, 180 | | 
| PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > 19, t, true, 190 | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > 0, x, NULL, NULL | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > 1, xx, NULL, NULL | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > 2, xxx, NULL, NULL | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > 3, xxxx, NULL, NULL | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > 4, xxxxx, NULL, NULL | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > 5, xxxxxx, NULL, NULL | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > 6, xxxxxxx, NULL, NULL | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > 7, xxxxxxxx, NULL, NULL | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > 8, xxxxxxxxx, NULL, NULL | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > 9, xxxxxxxxxx, NULL, NULL | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > 10, xxxxxxxxxx.., NULL, NULL | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > 11, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > 12, xxxxxxxxxx.., NULL, NULL | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > 13, xxxxxxxxxx.., NULL, NULL | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > 14, xxxxxxxxxx.., NULL, NULL | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > 15, xxxxxxxxxx.., NULL, NULL | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > 16, xxxxxxxxxx.., NULL, NULL | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > 17, xxxxxxxxxx.., NULL, NULL | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > 18, xxxxxxxxxx.., NULL, NULL | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > 19, xxxxxxxxxx.., NULL, NULL | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > 20, xxxxxxxxxx.., NULL, NULL | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > 21, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 
2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > 22, xxxxxxxxxx.., NULL, NULL | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > 23, xxxxxxxxxx.., NULL, NULL | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > 24, xxxxxxxxxx.., NULL, NULL | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > 25, xxxxxxxxxx.., NULL, NULL | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > 26, xxxxxxxxxx.., NULL, NULL | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > 27, xxxxxxxxxx.., NULL, NULL | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > 28, xxxxxxxxxx.., NULL, NULL | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > 29, xxxxxxxxxx.., NULL, NULL | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > 30, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > 31, xxxxxxxxxx.., NULL, NULL | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > 32, xxxxxxxxxx.., NULL, NULL | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > 33, xxxxxxxxxx.., NULL, NULL | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > 34, xxxxxxxxxx.., NULL, NULL | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > 35, xxxxxxxxxx.., NULL, NULL | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > 36, xxxxxxxxxx.., NULL, NULL | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > 37, xxxxxxxxxx.., NULL, NULL | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > 38, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > 39, xxxxxxxxxx.., NULL, NULL | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > 40, xxxxxxxxxx.., NULL, NULL | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > 41, xxxxxxxxxx.., NULL, NULL | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > 42, xxxxxxxxxx.., NULL, NULL | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > 43, xxxxxxxxxx.., NULL, NULL | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > 44, xxxxxxxxxx.., NULL, NULL | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > 45, xxxxxxxxxx.., NULL, NULL | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > 46, xxxxxxxxxx.., NULL, NULL | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 rev 
1, 674b} | | | PageId: 10047 RowCount: 5928 DataSize: 49128 GroupDataSize: 97128 ErasedRowCount: 2568 | | | > 47, xxxxxxxxxx.., NULL, NULL | | | Pa ... 3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, 
+2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | 
ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b (0) | 1 2 50b (2) | 2 4 50b (4) | 3 6 50b (6) | 4 8 50b (8) | 5 10 50b (10) | 6 12 50b (12) | 7 14 50b (14) | 8 16 50b (16) | 9 18 50b (18) | 10 20 50b (20) | 11 22 50b (22) | 12 24 50b (24) | 13 26 50b (26) | 14 28 50b (28) | 15 30 50b (30) | 16 32 50b (32) | 17 34 50b (34) | 18 36 50b (36) | 19 38 50b (38) | 19 39 50b (39) + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: (0) {Set 1 Uint32 : 0} | ERowOp 1: (1) {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: (2) {Set 1 Uint32 : 200} | ERowOp 1: (3) {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: (4) {Set 1 Uint32 : 400} | ERowOp 1: (5) {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: (6) {Set 1 Uint32 : 600} | ERowOp 1: (7) {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: (8) {Set 1 Uint32 : 800} | ERowOp 1: (9) {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: (10) {Set 1 Uint32 : 1000} | ERowOp 1: (11) {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: (12) {Set 1 Uint32 : 1200} | ERowOp 1: (13) {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: (14) {Set 1 Uint32 : 1400} | ERowOp 1: (15) {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: (16) {Set 1 Uint32 : 1600} | ERowOp 1: (17) {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: (18) {Set 1 Uint32 : 1800} | ERowOp 1: (19) {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: (20) {Set 1 Uint32 : 2000} | ERowOp 1: (21) {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: (22) {Set 1 Uint32 : 2200} | ERowOp 1: (23) {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: (24) {Set 1 Uint32 : 2400} | ERowOp 1: (25) {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: (26) {Set 1 Uint32 : 2600} | ERowOp 1: (27) {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: (28) {Set 1 Uint32 : 2800} | ERowOp 1: (29) {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: (30) {Set 1 Uint32 : 3000} | ERowOp 1: (31) {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: (32) {Set 1 Uint32 : 3200} | ERowOp 1: (33) {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: (34) {Set 1 Uint32 : 3400} | ERowOp 1: (35) {Set 1 Uint32 : 3500} + 
Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: (36) {Set 1 Uint32 : 3600} | ERowOp 1: (37) {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: (38) {Set 1 Uint32 : 3800} | ERowOp 1: (39) {Set 1 Uint32 : 3900} |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse >> HullReplWriteSst::Basic [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Defaults [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBase::DropModifiedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2024-11-21T23:06:50.454759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:06:50.461203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:50.461441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bc2/r3tmp/tmpsm86F0/pdisk_1.dat 2024-11-21T23:06:50.927859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:06:50.971573Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:51.044244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:51.044378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:51.059758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:51.197756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:06:51.759472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:702:2586], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:51.759653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:712:2591], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:51.759760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:51.774071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:52.048758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:716:2594], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:06:59.862513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fmxf45se2tg1d0s5a07ks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIwN2ZmYS1jODk0MDkyMi02NzMzYTlmZC03NzQ5MGZkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:00.796508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fn5gw0s96y1j7n9t787rk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNmMWYxMzgtOGIwNjk5NzMtNDgxNzgzMzAtY2EwNWFkZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> TPart::MassCheck [GOOD] >> TPart::WreckPart |79.7%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> HullReplWriteSst::Basic [GOOD] Test command err: commit chunk# 1 {ChunkIdx: 1 Offset: 101228544 Size: 32986016} 749680 commit chunk# 2 {ChunkIdx: 2 Offset: 101228544 Size: 32986764} 749697 commit chunk# 3 {ChunkIdx: 3 Offset: 101216256 Size: 32999964} 749997 commit chunk# 4 {ChunkIdx: 4 Offset: 101236736 Size: 32977788} 749493 commit chunk# 5 {ChunkIdx: 5 Offset: 101228544 Size: 32989184} 749752 commit chunk# 6 {ChunkIdx: 6 Offset: 101203968 Size: 33011316} 750255 commit chunk# 7 {ChunkIdx: 7 Offset: 101220352 Size: 32994376} 749870 commit chunk# 8 {ChunkIdx: 8 Offset: 101208064 Size: 33009644} 750217 commit chunk# 9 {ChunkIdx: 9 Offset: 101253120 Size: 32962696} 749150 commit chunk# 10 {ChunkIdx: 10 Offset: 101224448 Size: 32991296} 749800 commit chunk# 11 {ChunkIdx: 11 Offset: 101244928 Size: 32972772} 749379 commit chunk# 12 {ChunkIdx: 12 Offset: 101232640 Size: 32985048} 749658 commit chunk# 13 {ChunkIdx: 13 Offset: 101236736 Size: 32980340} 749551 commit chunk# 14 {ChunkIdx: 14 Offset: 101208064 Size: 33008192} 750184 commit chunk# 15 {ChunkIdx: 15 Offset: 101240832 Size: 32976864} 749472 commit chunk# 16 {ChunkIdx: 16 Offset: 101228544 Size: 32987160} 749706 commit chunk# 17 {ChunkIdx: 17 Offset: 101232640 Size: 32981660} 749581 commit chunk# 18 {ChunkIdx: 18 Offset: 101203968 Size: 33012460} 750281 >> TVersions::WreckHead [GOOD] >> 
TVersions::WreckHeadReverse |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TPart::WreckPart [GOOD] >> TPart::PageFailEnv >> YdbYqlClient::BuildInfo >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBase::DropModifiedTable [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries Test command err: 10 parts: 24000 rows, 1023 pages, 5 levels: (15913, 5312) (32008, 10677) (48025, 16016) (64045, 21356) (79984, 26669) 24000 rows, 1022 pages, 5 levels: (95941, 31988) (111937, 37320) (127807, 42610) (143950, 47991) (159964, 53329) 24000 rows, 1017 pages, 5 levels: (175939, 58654) (191968, 63997) (208006, 69343) (224077, 74700) (240169, 80064) 24000 rows, 1018 pages, 5 levels: (256087, 85370) (272023, 90682) (288022, 96015) (304021, 101348) (320014, 106679) 24000 rows, 1017 pages, 5 levels: (336010, 112011) (352159, 117394) (368092, 122705) (384097, 128040) (400177, 133400) 24000 rows, 1020 pages, 5 levels: (416161, 138728) (432139, 144054) (447946, 149323) (463885, 154636) (479950, 159991) 24000 rows, 1018 pages, 5 levels: (495844, 165289) (511843, 170622) (527917, 175980) (543799, 181274) (559849, 186624) 24000 rows, 1020 pages, 5 levels: (575899, 191974) (591895, 197306) (607765, 202596) (623614, 207879) (639565, 213196) 24000 rows, 1016 pages, 5 levels: (655633, 218552) (671707, 223910) (687631, 229218) (703516, 234513) (719437, 239820) 24000 rows, 1015 pages, 5 levels: (735415, 245146) (751432, 250485) (767404, 255809) (783427, 261150) (799303, 266442) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 10% (actual 10%) key = (80038, 26687) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160003, 53342) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240184, 80069) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320035, 106686) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400189, 133404) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559891, 186638) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719461, 239828) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80038, 26687) value = 2046023 (actual 2082181 - 0% error) 10% (actual 10%) key = (160003, 53342) value = 4091184 (actual 4123481 - 0% error) 9% (actual 9%) key = (240184, 80069) value = 6122543 (actual 6150789 - 0% error) 9% (actual 9%) key = (320035, 106686) value = 8157907 (actual 8182094 - 0% error) 9% (actual 9%) key = (400189, 133404) value = 10192958 (actual 10213133 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 12231984 (actual 12248119 - 0% error) 9% (actual 9%) key = (559891, 186638) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 16310625 (actual 16318673 - 0% error) 9% (actual 9%) key = (719461, 239828) value = 18343487 (actual 18347516 - 0% error) 9% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80041, 26688) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160006, 
53343) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240187, 80070) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320038, 106687) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400192, 133405) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479959, 159994) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639649, 213224) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719467, 239830) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2078803 - 0% error) 10% (actual 9%) key = (159427, 53150) value = 4076220 (actual 4112556 - 0% error) 10% (actual 9%) key = (239872, 79965) value = 6113960 (actual 6146198 - 0% error) 10% (actual 9%) key = (319849, 106624) value = 8153334 (actual 8181530 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192636 (actual 10213133 - 0% error) 10% (actual 9%) key = (479950, 159991) value = 12231529 (actual 12248119 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639481, 213168) value = 16306978 (actual 16318673 - 0% error) 10% (actual 9%) key = (719554, 239859) value = 18345472 (actual 18349551 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 40 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24122 (actual 10941 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 48205 (actual 36464 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 72229 (actual 61965 - 4% error) 10% (actual 10%) key = (290449, 96824) value = 96245 (actual 87436 - 3% error) 10% (actual 11%) key = (380554, 126859) value = 121759 (actual 114432 - 3% error) 10% (actual 11%) key = (470221, 156748) value = 147190 (actual 141318 - 2% error) 10% (actual 11%) key = (564922, 188315) value = 172572 (actual 169665 - 1% error) 10% (actual 11%) key = (654781, 218268) value = 198052 (actual 196636 - 0% error) 10% (actual 11%) key = (744745, 248256) value = 223572 (actual 223623 - 0% error) 6% (actual 6%) DataSizeHistogram: 10% (actual 4%) key = (34876, 11633) value = 2051181 (actual 936371 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 4097308 (actual 3106844 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 6142376 (actual 5275531 - 4% error) 10% (actual 11%) key = (295510, 98511) value = 8302483 (actual 7560005 - 3% error) 10% (actual 11%) key = (385543, 128522) value = 10466012 (actual 9847986 - 3% error) 11% (actual 12%) key = (485089, 161704) value = 12745808 (actual 12376381 - 1% error) 10% (actual 11%) key = (574921, 191648) value = 14910864 (actual 14665877 - 1% error) 10% (actual 10%) key = (659821, 219948) value = 16952139 (actual 16831893 - 0% error) 10% (actual 11%) key = (749764, 249929) value = 19112817 (actual 19116870 - 0% error) 6% (actual 6%) 10 parts: 458 rows, 20 pages, 2 levels: (129757, 43260) (277777, 92600) (456538, 152187) (612028, 204017) (789193, 263072) 435 rows, 19 pages, 2 levels: (112543, 37522) (293158, 97727) (441385, 147136) (615934, 205319) (788878, 262967) 945 rows, 41 pages, 3 levels: (151600, 50541) (323350, 107791) (489703, 163242) (644053, 214692) (785131, 261718) 1833 rows, 78 pages, 3 levels: (158677, 52900) (306616, 102213) (475423, 158482) (640840, 213621) (793240, 264421) 3716 rows, 157 pages, 4 levels: (159202, 53075) (325612, 108545) (486964, 162329) 
(645289, 215104) (796189, 265404) 7459 rows, 317 pages, 4 levels: (161596, 53873) (319558, 106527) (472684, 157569) (627499, 209174) (797368, 265797) 14922 rows, 632 pages, 5 levels: (158647, 52890) (322783, 107602) (480616, 160213) (642370, 214131) (798358, 266127) 29978 rows, 1271 pages, 5 levels: (161923, 53982) (322141, 107388) (482926, 160983) (641770, 213931) (798970, 266331) 60277 rows, 2559 pages, 6 levels: (158503, 52842) (317770, 105931) (477016, 159013) (638782, 212935) (799282, 266435) 119977 rows, 5092 pages, 6 levels: (159940, 53321) (320017, 106680) (480043, 160022) (638971, 212998) (799345, 266456) Checking BTree: Touched 0% bytes, 23 pages RowCountHistogram: 11% (actual 10%) key = (80065, 26696) value = 26939 (actual 24349 - 1% error) 8% (actual 10%) key = (160273, 53432) value = 46494 (actual 48472 - 0% error) 10% (actual 9%) key = (238531, 79518) value = 71449 (actual 71850 - 0% error) 11% (actual 10%) key = (321490, 107171) value = 97862 (actual 96825 - 0% error) 9% (actual 10%) key = (403054, 134359) value = 120541 (actual 121196 - 0% error) 9% (actual 10%) key = (482980, 161001) value = 142423 (actual 145274 - -1% error) 11% (actual 9%) key = (562504, 187509) value = 169510 (actual 169139 - 0% error) 8% (actual 10%) key = (642358, 214127) value = 189834 (actual 193159 - -1% error) 9% (actual 10%) key = (723937, 241320) value = 213159 (actual 217728 - -1% error) 11% (actual 9%) DataSizeHistogram: 9% (actual 10%) key = (78517, 26180) value = 1977474 (actual 2038537 - 0% error) 9% (actual 10%) key = (160273, 53432) value = 3961134 (actual 4131083 - 0% error) 10% (actual 9%) key = (238531, 79518) value = 6075748 (actual 6109008 - 0% error) 10% (actual 10%) key = (321490, 107171) value = 8316541 (actual 8227395 - 0% error) 9% (actual 10%) key = (403054, 134359) value = 10237629 (actual 10294360 - 0% error) 9% (actual 10%) key = (482980, 161001) value = 12091581 (actual 12337097 - -1% error) 11% (actual 9%) key = (562504, 187509) value = 14395756 (actual 14365428 - 0% error) 8% (actual 10%) key = (642358, 214127) value = 16125076 (actual 16407625 - -1% error) 9% (actual 10%) key = (723937, 241320) value = 18103285 (actual 18489967 - -1% error) 11% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (79669, 26564) value = 24001 (actual 24253 - 0% error) 10% (actual 9%) key = (159577, 53200) value = 48001 (actual 48252 - 0% error) 10% (actual 10%) key = (239932, 79985) value = 72009 (actual 72267 - 0% error) 10% (actual 10%) key = (319726, 106583) value = 96023 (actual 96287 - 0% error) 10% (actual 10%) key = (400054, 133359) value = 120041 (actual 120298 - 0% error) 10% (actual 10%) key = (479776, 159933) value = 144046 (actual 144308 - 0% error) 10% (actual 10%) key = (559822, 186615) value = 168053 (actual 168314 - 0% error) 10% (actual 10%) key = (639604, 213209) value = 192056 (actual 192323 - 0% error) 10% (actual 10%) key = (719287, 239770) value = 216074 (actual 216340 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79483, 26502) value = 2038439 (actual 2060507 - 0% error) 10% (actual 10%) key = (159100, 53041) value = 4076738 (actual 4098819 - 0% error) 10% (actual 10%) key = (239569, 79864) value = 6115069 (actual 6136999 - 0% error) 10% (actual 10%) key = (319558, 106527) value = 8153647 (actual 8175425 - 0% error) 10% (actual 10%) key = (399883, 133302) value = 10192275 (actual 10214181 - 0% error) 10% (actual 10%) key = (479713, 159912) value = 12231174 (actual 12253015 - 0% error) 10% (actual 
10%) key = (559510, 186511) value = 14269344 (actual 14291540 - 0% error) 10% (actual 10%) key = (639241, 213088) value = 16307225 (actual 16329282 - 0% error) 10% (actual 10%) key = (719065, 239696) value = 18345696 (actual 18367843 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 45 pages RowCountHistogram: 10% (actual 6%) key = (49852, 16625) value = 25345 (actual 15296 - 4% error) 10% (actual 11%) key = (138232, 46085) value = 49467 (actual 41866 - 3% error) 10% (actual 9%) key = (217993, 72672) value = 73622 (actual 65713 - 3% error) 10% (actual 10%) key = (301231, 100418) value = 97803 (actual 90730 - 2% error) 10% (actual 10%) key = (382450, 127491) value = 121925 (actual 115037 - 2% error) 10% (actual 11%) key = (472855, 157626) value = 146109 (actual 142266 - 1% error) 10% (actual 8%) key = (542308, 180777) value = 170141 (actual 163067 - 2% error) 10% (actual 11%) key = (632302, 210775) value = 194246 (actual 190164 - 1% error) 10% (actual 10%) key = (718414, 239479) value = 219732 (actual 216100 - 1% error) ... (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (553, 192) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 
(actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error) 4% (actual 4%) key = (325, 
116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: 167 rows, 1 pages, 0 levels: () () () () () 166 rows, 1 pages, 0 levels: () () () () () 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), [23701,23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2277890 46562 38 24 Expected: 
13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 >> TGRpcNewCoordinationClient::SessionMethods |79.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTableProfileTests::DescribeTableWithPartitioningPolicy >> TSchemeShardUserAttrsTest::UserConditionsAtAlter >> TGRpcYdbTest::CreateTableBadRequest >> YdbYqlClient::TestYqlWrongTable >> TSchemeShardUserAttrsTest::Boot >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts >> TYqlDateTimeTests::SimpleUpsertSelect >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TPart::ForwardEnv [GOOD] >> TPart::WreckPartColumnGroups >> YdbYqlClient::TestYqlIssues >> YdbImport::EmptyData >> YdbTableBulkUpsert::NotNulls >> YdbYqlClient::DeleteTableWithDeletedIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2024-11-21T23:07:00.340676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873005622292157:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:00.340753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001f13/r3tmp/tmpckkUhG/pdisk_1.dat 2024-11-21T23:07:00.754582Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:00.761615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:00.761733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:00.766526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63580, node 1 2024-11-21T23:07:00.868741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001f13/r3tmp/yandexsWCrvm.tmp 2024-11-21T23:07:00.868771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001f13/r3tmp/yandexsWCrvm.tmp 2024-11-21T23:07:00.868958Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001f13/r3tmp/yandexsWCrvm.tmp 2024-11-21T23:07:00.869059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11281 PQClient connected to localhost:63580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:01.243318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:07:04.039193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873022802162044:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:04.039514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:04.043764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873022802162056:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:04.048828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:07:04.076579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873022802162058:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:07:04.465026Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873022802162131:2314], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:04.472530Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTViMzA3NDQtYTQwZDU0M2ItZjIxM2M3MjMtZjQ0NmY0ZWE=, ActorId: [1:7439873018507194730:2302], ActorState: ExecuteState, TraceId: 01jd8fn9ebcah3123rar3yav0z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:04.479981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:07:04.481132Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:04.611155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:07:04.758917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T23:07:05.084298Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8fna8d2m6s34qgsztvzmxm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVhNjM3NGMtNWU0MGRkYTQtOTBiMWMyOWUtOTJkZTVjNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:07:05.127364Z node 1 :HTTP WARN: [::1]:45628 anonymous GET /actors/pqcd/health 2024-11-21T23:07:05.232311Z node 1 :HTTP WARN: [::1]:45630 anonymous GET /actors/pqcd/health 2024-11-21T23:07:05.338527Z node 1 :HTTP WARN: [::1]:45640 anonymous GET /actors/pqcd/health 2024-11-21T23:07:05.342740Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873005622292157:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:05.342810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:05.454536Z node 1 :HTTP WARN: [::1]:45644 anonymous GET /actors/pqcd/health 2024-11-21T23:07:05.560675Z node 1 :HTTP WARN: [::1]:45660 anonymous GET /actors/pqcd/health 2024-11-21T23:07:05.670990Z node 1 :HTTP WARN: [::1]:45668 anonymous GET /actors/pqcd/health 2024-11-21T23:07:05.785559Z node 1 :HTTP WARN: [::1]:45676 anonymous GET /actors/pqcd/health 2024-11-21T23:07:05.922534Z node 1 :HTTP WARN: [::1]:45690 anonymous GET /actors/pqcd/health >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartGroupBtreeIndexIter::NoNodes >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> 
TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::DirectReadCleanCache >> TGRpcClientLowTest::GrpcRequestProxy >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp >> TGRpcNewCoordinationClient::SessionMethods [GOOD] >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b (0, 1) | 3 39 620b (5, 7) + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b (0, 1) | 1 39 2466b (5, 7) + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, 
{Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 6 12 122b (1, 8) | 7 14 122b (2, NULL) | 8 16 122b (2, 4) | 9 18 122b (2, 7) | 10 20 122b (2, 10) | 11 22 122b (3, 3) | 12 24 122b (3, 6) | 13 26 122b (3, 8) | 14 28 122b (4, NULL) | 15 30 122b (4, 4) | 16 32 122b (4, 7) | 17 34 122b (4, 10) | 18 36 122b (5, 3) | 19 38 122b (5, 6) | 19 39 122b (5, 7) + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > (0, 4) | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > (0, 7) | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > (0, 10) | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > (1, 3) | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > (1, 6) | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > (1, 8) | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > (2, NULL) | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > (2, 4) | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > (2, 7) | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > (2, 10) | PageId: 10 RowCount: 22 DataSize: 1332 
ErasedRowCount: 0 | > (3, 3) | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > (3, 6) | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > (3, 8) | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > (4, NULL) | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > (4, 4) | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > (4, 7) | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > (4, 10) | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > (5, 3) | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > (5, 6) | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, 
{Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 ... xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} 
Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 7 12 122b (1, 8) | 8 14 122b (2, NULL) | 9 16 122b (2, 4) | 11 18 122b (2, 7) | 12 20 122b (2, 10) | 13 22 122b (3, 3) | 15 24 122b (3, 6) | 16 26 122b (3, 8) | 17 28 122b (4, NULL) | 19 30 122b (4, 4) | 20 32 122b (4, 7) | 21 34 122b (4, 10) | 24 36 122b (5, 3) | 25 38 122b (5, 6) | 25 39 122b (5, 7) + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > (0, 10) | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > (2, 7) | + 
BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: (2, 10) 
{Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} >> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::ForwardEnvColumnGroups >> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::NoNodes >> TSchemeShardUserAttrsTest::Boot [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] >> YdbYqlClient::TestYqlWrongTable [GOOD] >> YdbYqlClient::TraceId >> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:07:10.434567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:07:10.434749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:07:10.434802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:07:10.434873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:07:10.434928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:07:10.434964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:07:10.435040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:07:10.454214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:07:10.797367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:07:10.797429Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:10.878565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:07:10.893586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:07:10.922598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:07:11.008621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:07:11.032904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:07:11.049774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:07:11.085757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:07:11.138851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:07:11.316272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:07:11.316398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:07:11.337868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:07:11.337973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:07:11.338043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:07:11.338172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.360528Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is 
[1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:07:11.616684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:07:11.616995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.617229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:07:11.617504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:07:11.617583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.649869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:07:11.665323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:07:11.665702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.665779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:07:11.665834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:07:11.665874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:07:11.671522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.671620Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:07:11.671660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:07:11.696910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.696985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.697068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:07:11.697136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:07:11.709520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:07:11.712703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg 
type: 269090816 2024-11-21T23:07:11.712904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:07:11.728736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:07:11.728920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:07:11.728969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:07:11.729299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:07:11.729362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:07:11.732072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:07:11.732277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:07:11.739291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:07:11.739349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:07:11.739523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:07:11.739560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:07:11.746969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.747040Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:07:11.747140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:07:11.747176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:07:11.747211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:07:11.747245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:07:11.747286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:07:11.747324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:07:11.747403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:07:11.747441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2024-11-21T23:07:11.747466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:07:11.748897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:07:11.749003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:07:11.749055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:07:11.749105Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:07:11.749143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:07:11.749226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:07:11.757321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:07:11.757866Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:07:10.434555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:07:10.434757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:07:10.434812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:07:10.434882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:07:10.434929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:07:10.434969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:07:10.435056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:07:10.454759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:07:10.770082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2024-11-21T23:07:10.770150Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:10.882574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:07:10.897497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:07:10.923633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:07:11.005749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:07:11.032664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:07:11.049246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:07:11.085689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:07:11.166511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:07:11.316924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:07:11.317010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:07:11.338860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:07:11.338950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:07:11.339014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:07:11.339121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.360494Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:07:11.597632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:07:11.606735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.607109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:07:11.618313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:07:11.618463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.649885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:07:11.665327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T23:07:11.665697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.665783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:07:11.665838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:07:11.665875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:07:11.668607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.668676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:07:11.668716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:07:11.683379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.683468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.690444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:07:11.690579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:07:11.702862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:07:11.712348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:07:11.712593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:07:11.727603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:07:11.727802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:07:11.727875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:07:11.729862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:07:11.729958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:07:11.733807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:07:11.733990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:07:11.736481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:07:11.736536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:07:11.736739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:07:11.736785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:07:11.746969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:07:11.747045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:07:11.747142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:07:11.747183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:07:11.747242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:07:11.747288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:07:11.747342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:07:11.747384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:07:11.747487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:07:11.747540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:07:11.747601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:07:11.749445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:07:11.749575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:07:11.749617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:07:11.749669Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:07:11.749719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:07:11.749814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
t TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:07:12.041849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:07:12.041892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:07:12.042346Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:07:12.042488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:07:12.042537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:327:2319] TestWaitNotification: OK eventTxId 102 2024-11-21T23:07:12.043102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:07:12.043291Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 213us result status StatusSuccess 2024-11-21T23:07:12.043611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T23:07:12.046203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:07:12.046424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:07:12.046533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T23:07:12.046664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:07:12.046706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:07:12.050281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:07:12.050473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2024-11-21T23:07:12.050671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:07:12.050716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:07:12.050810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2024-11-21T23:07:12.050957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:07:12.059530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T23:07:12.059716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2024-11-21T23:07:12.060073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:07:12.060197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:07:12.060261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2024-11-21T23:07:12.060448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T23:07:12.060485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:07:12.060548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:07:12.060629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T23:07:12.060687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:07:12.060724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:07:12.060761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:07:12.060796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:07:12.060861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:07:12.060904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2024-11-21T23:07:12.060955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T23:07:12.067482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:07:12.067586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:07:12.067788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:07:12.067836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T23:07:12.068407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:07:12.068513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:07:12.068547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:07:12.068590Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T23:07:12.068626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:07:12.068728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T23:07:12.075124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:07:12.075508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:07:12.075568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:07:12.076095Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:07:12.076202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:07:12.076256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:344:2336] TestWaitNotification: OK eventTxId 103 2024-11-21T23:07:12.076860Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:07:12.077090Z node 1 :SCHEMESHARD_DESCRIBE INFO: 
Tablet 72057594046678944 describe path "/MyRoot/DirA" took 231us result status StatusSuccess 2024-11-21T23:07:12.077483Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbYqlClient::TestDescribeDirectory >> YdbImport::EmptyData [GOOD] >> TYqlDateTimeTests::TimestampKey ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 12014, MsgBus: 15135 2024-11-21T23:06:59.838438Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873000722724532:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:59.838484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c87/r3tmp/tmpvU46Ra/pdisk_1.dat 2024-11-21T23:07:00.249493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:00.249598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:00.255129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:00.282099Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12014, node 1 2024-11-21T23:07:00.354983Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:00.355016Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:00.355021Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:00.355106Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15135 TClient is connected to server localhost:15135 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:00.912917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:00.931070Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:03.256146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873017902594132:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.256284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.698229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T23:07:03.854762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873017902594246:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.854854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.855019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873017902594251:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.868058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T23:07:03.879373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873017902594253:2316], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:07:04.547809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:07:04.881719Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873000722724532:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:04.882022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:05.035750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2024-11-21T23:07:05.694814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:07:06.470874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.193970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.666931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T23:07:07.755937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T23:07:10.303043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710715:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 |79.7%| [TM] {asan, default-linux-x86_64, pic, release} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions >> TErasureTypeTest::TestAllSpecies1of2 [GOOD] >> TErasureTypeTest::TestAllSpecies2of2 >> TFlatTableExecutorVersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableLongTx::MemTableLongTx >> YdbYqlClient::TestYqlIssues [GOOD] >> YdbYqlClient::TestYqlSessionClosed >> TFlatTableLongTx::MemTableLongTx [GOOD] >> TFlatTableLongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableLongTx::CompactCommittedLongTx >> TFlatTableLongTx::CompactCommittedLongTx [GOOD] >> TFlatTableLongTx::CompactedLongTxRestart [GOOD] >> TFlatTableLongTx::CompactMultipleChanges >> TFlatTableLongTx::CompactMultipleChanges [GOOD] >> TFlatTableLongTx::LongTxBorrow >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors >> TFlatTableLongTx::LongTxBorrow [GOOD] >> TFlatTableLongTx::MemTableLongTxRead [GOOD] >> TFlatTableLongTxAndBlobs::SmallValues >> TFlatTableLongTxAndBlobs::SmallValues [GOOD] >> TFlatTableLongTxAndBlobs::OuterBlobValues >> TFlatTableLongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableLongTxAndBlobs::ExternalBlobValues >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::DateKey >> TFlatTableLongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTablePostponedScan::TestPostponedScan >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Ten_Mixed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2024-11-21T23:05:53.929263Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:05:54.038963Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:05:54.060129Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:05:54.060219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:05:54.060451Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:05:54.069322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2024-11-21T23:05:54.069546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:54.069742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:54.069892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:54.070004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:54.070116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:54.070243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:54.070357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:54.071383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:54.071513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:54.071616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:54.071736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:54.091986Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:54.098724Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:05:54.098978Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:05:54.099031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:05:54.099258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 
2024-11-21T23:05:54.099386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:05:54.099444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:05:54.099488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:05:54.099580Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:05:54.099648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:05:54.099731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:05:54.099830Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:05:54.100040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:54.100127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:05:54.100174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:05:54.100203Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:05:54.100303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:05:54.100363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:05:54.100402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:05:54.100428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:05:54.100495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:05:54.100533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:05:54.100583Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:05:54.100631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:05:54.100669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:05:54.100700Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:05:54.100852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2024-11-21T23:05:54.100947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2024-11-21T23:05:54.101039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2024-11-21T23:05:54.101145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2024-11-21T23:05:54.101295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:05:54.101361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:05:54.101395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:05:54.101591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:05:54.101652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:05:54.101684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:05:54.101806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:05:54.101842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:05:54.101869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:05:54.102056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2024-11-21T23:05:54.102094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:05:54.102126Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:05:54.102234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... e=common_data.cpp:29;EXECUTE:finishLoadingTime=4863; 2024-11-21T23:07:13.090257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=12701; 2024-11-21T23:07:13.091335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=1006; 2024-11-21T23:07:13.093280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=980; 2024-11-21T23:07:13.093354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1955; 2024-11-21T23:07:13.093505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=98; 2024-11-21T23:07:13.093645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:07:13.093699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=147; 2024-11-21T23:07:13.093801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=58; 2024-11-21T23:07:13.093880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=39; 2024-11-21T23:07:13.095890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1961; 2024-11-21T23:07:13.098115Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2128; 2024-11-21T23:07:13.098634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=449; 2024-11-21T23:07:13.099007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=311; 2024-11-21T23:07:13.099069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 
2024-11-21T23:07:13.099113Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2024-11-21T23:07:13.099160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2024-11-21T23:07:13.099242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=49; 2024-11-21T23:07:13.099297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2024-11-21T23:07:13.099398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=62; 2024-11-21T23:07:13.099448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2024-11-21T23:07:13.099533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2024-11-21T23:07:13.099583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29315; 2024-11-21T23:07:13.099757Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=2;blobs=4;rows=75200;bytes=7400888;raw_bytes=7389306; inactive portions=35;blobs=70;rows=1239297;bytes=98153860;raw_bytes=121802583; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:07:13.099872Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:07:13.099917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:07:13.099990Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T23:07:13.100078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:07:13.100121Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:07:13.100192Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:07:13.100232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:07:13.100312Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:07:13.100395Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T23:07:13.100484Z 
node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T23:07:13.100523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:07:13.100571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:07:13.100603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:07:13.100687Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:07:13.100768Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:07:13.101720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:07:13.101812Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4061:5688];tablet_id=9437184;parent=[1:4006:5640];fline=manager.h:99;event=ask_data;request=request_id=232;1={portions_count=37};; 2024-11-21T23:07:13.102563Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:4061:5688];tablet_id=9437184;parent=[1:4006:5640];fline=manager.h:99;event=ask_data;request=request_id=234;1={portions_count=2};; 2024-11-21T23:07:13.102755Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:07:13.103140Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:07:13.103175Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T23:07:13.103201Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:07:13.103241Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:07:13.103301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:07:13.103363Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=9; 2024-11-21T23:07:13.103420Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2024-11-21T23:07:13.103463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:07:13.103534Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:07:13.103592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:07:13.103636Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:07:13.103715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:07:13.104898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=37;path_id=1; 2024-11-21T23:07:13.123960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=37;path_id=1; 2024-11-21T23:07:13.127649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:07:13.127737Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4006:5640];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> 
KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool >> TFlatTablePostponedScan::TestPostponedScan [GOOD] >> TFlatTablePostponedScan::TestCancelFinishedScan >> TFlatTablePostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTablePostponedScan::TestCancelRunningPostponedScan >> TFlatTablePostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTablePostponedScan::TestPostponedScanSnapshotMVCC >> TFlatTablePostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 >> TGRpcYdbTest::ExecuteQueryImplicitSession >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TFlatTableReschedule::TestExecuteReschedule [GOOD] >> TFlatTableSnapshotWithCommits::SnapshotWithCommits [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTable >> TGenCompaction::OverloadFactorDuringForceCompaction >> TGRpcNewClient::SimpleYqlQuery >> YdbS3Internal::TestS3Listing >> TFlatTableExecutorIndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> TFlatTableExecutorIndexLoading::PrechargeAndSeek_BTreeIndex >> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> TGRpcYdbTest::CreateTableBadRequest2 [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::GetKey >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts >> TIterator::GetKey [GOOD] >> YdbTableBulkUpsertOlap::UpsertCsvBug >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> YdbYqlClient::TraceId [GOOD] >> YdbYqlClient::Utf8DatabasePassViaHeader >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> 
TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:218:2060] recipient: [1:212:2140] Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:230:2060] recipient: [1:212:2140] 2024-11-21T23:05:33.792934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:05:33.793023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:33.793058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:05:33.793107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:05:33.793152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:05:33.793179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:05:33.793232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:05:33.793557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:05:33.869669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:05:33.869745Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:33.885835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:05:33.886587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:05:33.886810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:05:33.893117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:05:33.893479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:05:33.894081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:33.894260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:05:33.897414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:33.898498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:33.898674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:33.898826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:05:33.898866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:33.898898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:05:33.899036Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:05:33.905060Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:229:2151] sender: [1:341:2060] recipient: [1:17:2064] 2024-11-21T23:05:34.024782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:05:34.025012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:34.025183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:05:34.025394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:05:34.025469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:34.027463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:34.027575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:05:34.027727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:34.027785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:05:34.027826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:05:34.027864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:05:34.029734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:34.029803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:05:34.029839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:05:34.031470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:34.031510Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:34.031553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:34.031591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:05:34.035278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:05:34.037249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:05:34.037411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:05:34.038382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:05:34.038531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 235 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:05:34.038583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:34.038858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:05:34.038916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:05:34.039063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:05:34.039134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:05:34.041240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:05:34.041280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:05:34.041450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:05:34.041490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:308:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:05:34.041822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:05:34.041866Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:05:34.041962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:05:34.042002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:34.042052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:05:34.042089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:05:34.042122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:05:34.042150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:05:34.042224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:05:34.042267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:05:34.042294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:05:34.044078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:34.044187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:05:34.044221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:05:34.044257Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:05:34.044308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:05:34.044418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... CE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:14.578496Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:14.578568Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:14.578596Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:14.852109Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:14.852176Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:14.852256Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:14.852284Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:15.110117Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:15.110192Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:15.110267Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:15.110294Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:15.378650Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:15.378728Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:15.378807Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:15.378837Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:15.665043Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:15.665109Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:15.665182Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:15.665213Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:15.925364Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:15.925443Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:15.925520Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:15.925552Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:16.214658Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:16.214744Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:16.214822Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:16.214853Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:16.473683Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:16.473747Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:16.473813Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:16.473841Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:16.726612Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:16.726681Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:16.726767Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:16.726798Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:17.026684Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:17.026767Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:07:17.026840Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:229:2151], Recipient [7:229:2151]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:17.026867Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:07:17.079201Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1072:2837], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T23:07:17.079293Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T23:07:17.079443Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:07:17.079685Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 228us result status StatusPathDoesNotExist 2024-11-21T23:07:17.079859Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:07:17.080421Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1073:2838], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T23:07:17.080484Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T23:07:17.080580Z 
node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:07:17.080748Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 176us result status StatusPathDoesNotExist 2024-11-21T23:07:17.080921Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:07:17.081410Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1074:2839], Recipient [7:229:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2024-11-21T23:07:17.081466Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T23:07:17.081551Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:07:17.081730Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 177us result status StatusPathDoesNotExist 2024-11-21T23:07:17.081876Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TIterator::GetKey [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:36.723178Z 00000.007 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.009 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting 
storage for BS group 3 00000.015 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.015 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 302b annex 0, ~{ } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} hope 1 -> done Change{2, redo 0b alter 15b annex 0, ~{ } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 9216b requested for data (10240b in total) 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 10240b of static mem, Memory{10240 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release 5120b of static, Memory{5120 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} captured Res{static 5120b} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{6144 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} found attached Res{static 5120b} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} release captured by tx Res{static 5120b}, Memory{1024 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 1b requested for data (5121b in total) 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 5121b of static mem, Memory{5121 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release 5121b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 19456b requested for data (20480b in total) 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release tx data 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} request Res{1 20480b} type small_transaction 00000.021 DD| RESOURCE_BROKER: Submitted new unknown task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) 
priority=5 resources={0, 20480} 00000.021 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])' of unknown type 'small_transaction' to default queue 00000.021 DD| RESOURCE_BROKER: Allocate resources {0, 20480} for task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) from queue queue_default 00000.021 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])' of unknown type 'small_transaction' to default queue 00000.022 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.002384 (insert task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])) 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} acquired dyn mem Res{1 20480b}, Memory{0 dyn 20480} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 2 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} update resource task 1 releasing 10240b, Memory{0 dyn 10240} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} captured Res{1 10240b} 00000.022 DD| RESOURCE_BROKER: Update task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) (priority=5 type=small_transaction resources={0, 10240} resubmit=0) 00000.022 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])' of unknown type 'small_transaction' to default queue 00000.022 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.001192 (insert task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061])) 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 10240} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} found attached Res{1 10240b} 00000.022 DD| TABLET_EXECUTOR: release 1024b of static tx data due to attached res 1, Memory{0 dyn 10240} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 1b requested for data (10241b 
in total) 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release tx data 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} released on update Res{1 10240b}, Memory{0 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} update Res{1 20480b} type small_transaction 00000.023 DD| RESOURCE_BROKER: Update cookie for task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) 00000.023 DD| RESOURCE_BROKER: Update task Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [1:29:2061]) (priority=5 type=small_transaction resources={0, 20480} resubmit=1) 00000.023 EE| RESOURCE_BROKER: Assigning w ... 00000.015 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.015 II| FAKE_ENV: All BS storage groups are stopped 00000.015 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.015 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:07:15.432023Z 00000.007 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.009 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.009 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 ...inserting rows 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 4832b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ...making snapshot and writing to table 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite} queued, type 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite} hope 1 -> done Change{3, redo 256b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxMakeSnapshotAndWrite} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 2/8589934595, generation 0 00000.015 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.015 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.015 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.015 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 4 00000.018 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch 2, 0 parts} step 5, product {1 parts epoch 2} done 00000.018 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 8 for step 5 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...waiting for snapshot to complete ...borrowing snapshot 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot} hope 1 -> done Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxBorrowSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 ...checking rows 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 1 -> done Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.022 II| TABLET_EXECUTOR: 
Leader{1:2:8} suiciding, Waste{2:0, 3447b +(1, 892b), 7 trc, -892b acc} 00000.025 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.025 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 349 bytes, 349 total, blobs: { [1:2:7:1:36864:38:0], [1:2:2:1:8192:209:0], [1:2:6:1:32768:102:0] } 00000.026 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 1183 bytes, 1183 total, blobs: { [1:2:5:1:12288:161:0], [1:2:3:1:24576:892:0], [1:2:4:1:24576:130:0] } 00000.027 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.027 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 231b, wait} done, Waste{2:0, 3447b +(1, 892b), 7 trc} 00000.027 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ...checking rows 00000.028 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows 00000.028 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.028 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 1 -> retry Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} touch new 0b, 2820b lo load (2820b in total), 0b requested for data (4194304b in total) 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} postponed, 2820b, pages {1 wait, 1 load}, freshly touched 1 pages 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:5:1:12288:161:0] ok OK}, category 1 00000.031 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 2 -> done Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.031 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} ...restarting tablet 00000.031 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 3447b +(0, 0b), 1 trc, -892b acc} 00000.034 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 231 bytes, 231 total, blobs: { [1:3:1:1:28672:231:0] } 00000.035 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 247 bytes, 247 total, blobs: { [1:2:7:1:36864:38:0], [1:2:2:1:8192:209:0] } 00000.035 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 291 bytes, 291 total, blobs: { [1:2:5:1:12288:161:0], [1:2:4:1:24576:130:0] } 00000.036 II| TABLET_EXECUTOR: Leader{1:4:0} activating executor 00000.036 II| TABLET_EXECUTOR: LSnap{1:4, on 4:1, 231b, wait} done, Waste{2:0, 3447b +(0, 0b), 1 trc} 00000.037 DD| TABLET_EXECUTOR: Leader{1:4:2} commited cookie 2 for step 1 ...checking rows 00000.037 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows 00000.038 DD| 
TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.038 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 1 -> retry Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.038 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} touch new 0b, 2820b lo load (2820b in total), 0b requested for data (4194304b in total) 00000.038 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.038 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} postponed, 2820b, pages {1 wait, 1 load}, freshly touched 1 pages 00000.038 DD| TABLET_EXECUTOR: Leader{1:4:2} got result TEvResult{1 pages [1:2:5:1:12288:161:0] ok OK}, category 1 00000.040 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} hope 2 -> done Change{4, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.040 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableSnapshotWithCommits::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} 00000.040 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.041 II| TABLET_EXECUTOR: Leader{1:4:2} suiciding, Waste{2:0, 3447b +(0, 0b), 1 trc, -892b acc} 00000.041 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 4 reqs hit {4 5694b} miss {0 0b} 00000.041 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.041 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {602b, 12} 00000.041 II| FAKE_ENV: DS.1 gone, left {3678b, 6}, put {4938b, 10} 00000.041 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.041 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.041 II| FAKE_ENV: All BS storage groups are stopped 00000.041 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.041 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 85}, stopped >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Crossed >> TYqlDateTimeTests::TimestampKey [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect >> YdbYqlClient::TestDescribeDirectory [GOOD] >> YdbYqlClient::TestDoubleKey >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession >> TYqlDateTimeTests::DateKey [GOOD] >> TYqlDateTimeTests::DatetimeKey >> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD] >> TGRpcYdbTest::CreateDeleteYqlSession >> YdbYqlClient::TestDecimal >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::BiStreamPing >> YdbYqlClient::DiscoveryLocationOverride >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions >> 
TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> TGRpcNewClient::SimpleYqlQuery [GOOD] >> TGRpcNewClient::TestAuth >> YdbS3Internal::TestS3Listing [GOOD] >> YdbS3Internal::TestAccessCheck >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan >> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] Test command err: 2024-11-21T23:06:36.717457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872902616401089:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:36.717519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023f8/r3tmp/tmpmW3CD3/pdisk_1.dat 2024-11-21T23:06:37.674766Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:37.734101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:37.742567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:37.752160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24968, node 1 2024-11-21T23:06:37.974913Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:37.974939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:37.974946Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:37.975031Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:38.535972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:06:38.554987Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:06:38.609847Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:41.354871Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:41.357512Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTliNTYxN2EtZDc1MTczMTctZjFkM2U4NTYtZDRjMmEwNQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTliNTYxN2EtZDc1MTczMTctZjFkM2U4NTYtZDRjMmEwNQ== 2024-11-21T23:06:41.357800Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872924091237987:2300], Start check tables existence, number paths: 2 2024-11-21T23:06:41.357861Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTliNTYxN2EtZDc1MTczMTctZjFkM2U4NTYtZDRjMmEwNQ==, ActorId: [1:7439872924091237988:2301], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:41.384127Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:06:41.384207Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872924091237987:2300], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:41.384253Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872924091237987:2300], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:41.384295Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872924091237987:2300], Successfully finished 2024-11-21T23:06:41.384721Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:41.384740Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:41.384750Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:41.403397Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872924091238015:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:41.418707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:41.421296Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872924091238015:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2024-11-21T23:06:41.427583Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872924091238015:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T23:06:41.436214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872924091238015:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:06:41.530630Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872924091238015:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:41.535435Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872924091238015:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:06:41.545520Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T23:06:41.545558Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:06:41.545931Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872924091238075:2305], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T23:06:41.546164Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTliNTYxN2EtZDc1MTczMTctZjFkM2U4NTYtZDRjMmEwNQ==, ActorId: [1:7439872924091237988:2301], ActorState: ReadyState, TraceId: 01jd8fmkg7bhttd6012h4pa524, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL default WITH ( CONCURRENT_QUERY_LIMIT=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T23:06:41.893149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872924091238075:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:41.911083Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872902616401089:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:41.919070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:06:41.919185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:41.933945Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OTliNTYxN2EtZDc1MTczMTctZjFkM2U4NTYtZDRjMmEwNQ==, ActorId: [1:7439872924091237988:2301], ActorState: ExecuteState, TraceId: 01jd8fmkg7bhttd6012h4pa524, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7439872924091238076:2301] WorkloadServiceCleanup: 0 2024-11-21T23:06:41.935781Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTliNTYxN2EtZDc1MTczMTctZjFkM2U4NTYtZDRjMmEwNQ==, ActorId: [1:7439872924091237988:2301], ActorState: CleanupState, TraceId: 01jd8fmkg7bhttd6012h4pa524, EndCleanup, isFinal: 0 2024-11-21T23:06:41.935956Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTliNTYxN2EtZDc1MTczMTctZjFkM2U4NTYtZDRjMmEwNQ==, ActorId: [1:7439872924091237988:2301], ActorState: CleanupState, TraceId: 01jd8fmkg7bhttd6012h4pa524, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7439872902616401127:2256] 2024-11-21T23:06:41.942459Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzU2YWU3NDYtNDMyODNhYmUtNjY0YWRjMGUtNWM0ZjY4MjY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzU2YWU3NDYtNDMyODNhYmUtNjY0YWRjMGUtNWM0ZjY4MjY= 2024-11-21T23:06:41.942758Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzU2YWU3NDYtNDMyODNhYmUtNjY0YWRjMGUtNWM0ZjY4MjY=, ActorId: [1:7439872924091238089:2307], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:41.942905Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzU2YWU3NDYtNDMyODNhYmUtNjY0YWRjMGUtNWM0ZjY4MjY=, ActorId: [1:7439872924091238089:2307], ActorState: ReadyState, TraceId: 01jd8fmkwpch3q7wvatenmwh0k, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439872924091238088:2343] database: Root databaseId: /Root pool id: default 2024-11-21T23:06:41.942962Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T23:06:41.943023Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439872924091238089:2307], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=1&id=NzU2YWU3NDYtNDMyODNhYmUtNjY0YWRjMGUtNWM0ZjY4MjY= 2024-11-21T23:06:41.943066Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872924091238091:2308], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T23:06:41.943134Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872924091238092:2309], Database: /Root, Start database fetching 2024-11-21T23:06:41.944827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872924091238091:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:41.944900Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872924091238092:2309], Database: /Root, Database info successfully fetched, serv ... rId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, ExecutePhyTx, tx: 0x0000000000000000 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 1 2024-11-21T23:07:20.392249Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, TExecPhysicalRequest, add DeferredEffect to Transaction, current Transactions.size(): 1 2024-11-21T23:07:20.392296Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, TExecPhysicalRequest, tx has commit locks 2024-11-21T23:07:20.392378Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, Sending to Executer TraceId: 0 8 2024-11-21T23:07:20.392458Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, Created new KQP executer: [5:7439873089035230224:2422] isRollback: 0 2024-11-21T23:07:20.400489Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:07:20.400683Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, txInfo Status: Committed Kind: ReadWrite TotalDuration: 18.14 ServerDuration: 17.957 QueriesCount: 2 2024-11-21T23:07:20.400825Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:07:20.400899Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:07:20.400929Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, EndCleanup, isFinal: 0 2024-11-21T23:07:20.400992Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsdx6haxcec5sefkzbcq, Sent 
query response back to proxy, proxyRequestId: 18, proxyId: [5:7439873050380523518:2256] 2024-11-21T23:07:20.401571Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, TxId: 2024-11-21T23:07:20.401692Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T23:07:20.402422Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ReadyState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, received request, proxyRequestId: 19 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [5:7439873089035230232:2431] database: /Root databaseId: /Root pool id: default 2024-11-21T23:07:20.402463Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ReadyState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, request placed into pool from cache: default 2024-11-21T23:07:20.402541Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ReadyState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, Sending CompileQuery request 2024-11-21T23:07:20.404702Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, ExecutePhyTx, tx: 0x000050C000082018 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:07:20.404777Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, Sending to Executer TraceId: 0 8 2024-11-21T23:07:20.404855Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 
01jd8fnsejdd3trzq7dbbwdje6, Created new KQP executer: [5:7439873089035230236:2422] isRollback: 0 2024-11-21T23:07:20.412701Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T23:07:20.412795Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, ExecutePhyTx, tx: 0x000050C00007A5D8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T23:07:20.413802Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:07:20.413965Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, txInfo Status: Committed Kind: ReadOnly TotalDuration: 9.409 ServerDuration: 9.272 QueriesCount: 2 2024-11-21T23:07:20.414097Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:07:20.414166Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:07:20.414200Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, EndCleanup, isFinal: 0 2024-11-21T23:07:20.414263Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ExecuteState, TraceId: 01jd8fnsejdd3trzq7dbbwdje6, Sent query response back to proxy, proxyRequestId: 19, proxyId: [5:7439873050380523518:2256] 2024-11-21T23:07:20.414828Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, TxId: 2024-11-21T23:07:20.414950Z node 5 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, TxId: 2024-11-21T23:07:20.415077Z node 5 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [5:7439873071855360517:2310], DatabaseId: /Root, PoolId: my_pool, succefully refreshed pool state, in flight: 0, delayed: 0 
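For orientation while reading the workload-service trace above: the statements it records are ordinary YQL, shown below reformatted for readability. This is only a sketch lifted from the log itself, not the test's source code; the pool name, the limit value, and the `.metadata/workload_manager` table names come straight from the trace, and the parameter binding the test performs is omitted.

    -- DDL executed by the session in the trace (QUERY_ACTION_EXECUTE, QUERY_TYPE_SQL_DDL)
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 0
    );

    -- Lease bookkeeping issued by TRefreshPoolStateQuery after the pool lease is updated
    DECLARE $database_id AS Text;
    DECLARE $pool_id AS Text;
    SELECT COUNT(*) AS delayed_requests
    FROM `.metadata/workload_manager/delayed_requests`
    WHERE database = $database_id AND pool_id = $pool_id
      AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp())
      AND lease_deadline >= CurrentUtcTimestamp();
    SELECT COUNT(*) AS running_requests
    FROM `.metadata/workload_manager/running_requests`
    WHERE database = $database_id AND pool_id = $pool_id
      AND lease_deadline >= CurrentUtcTimestamp();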
2024-11-21T23:07:20.415524Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:07:20.415564Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:07:20.415596Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:07:20.415629Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:07:20.415711Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YmYyNTkyMDAtY2I1NWY0NzYtMmRmYWIyNzgtYmE3ZWNjNDg=, ActorId: [5:7439873089035230199:2422], ActorState: unknown state, Session actor destroyed >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive >> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery |79.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning >> TGRpcNewCoordinationClient::CheckUnauthorized >> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD] >> YdbYqlClient::TestYqlLongSessionMultipleErrors >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> TPersQueueTest::DirectReadCleanCache [FAIL] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder >> TYqlDecimalTests::SimpleUpsertSelect [GOOD] >> TYqlDecimalTests::NegativeValues >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid >> TGRpcClientLowTest::BiStreamPing [GOOD] >> TGRpcClientLowTest::BiStreamCancelled >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] >> TGRpcYdbTest::BeginTxRequestError >> YdbYqlClient::DiscoveryLocationOverride [GOOD] >> YdbYqlClient::QueryLimits >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestDecimal1 >> TGRpcNewClient::TestAuth [GOOD] >> TGRpcNewClient::YqlQueryWithParams >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> YdbTableBulkUpsert::Limits [GOOD] >> YdbTableBulkUpsert::DataValidation >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> TYqlDateTimeTests::DatetimeKey [GOOD] >> TYqlDateTimeTests::IntervalKey >> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest >> YdbS3Internal::TestAccessCheck [GOOD] >> YdbS3Internal::BadRequests >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Five_Five_Mixed >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> TTableProfileTests::ExplicitPartitionsUnordered [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 20593, MsgBus: 20360 2024-11-21T23:06:58.399715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872998668734283:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:58.399756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002cc0/r3tmp/tmpJQSvV1/pdisk_1.dat 2024-11-21T23:06:59.157084Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:59.213402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:59.213527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:59.223950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20593, node 1 2024-11-21T23:06:59.535405Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:59.535427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:59.535439Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:59.535524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20360 TClient is connected to server localhost:20360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:00.570720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:00.605264Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:03.419043Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872998668734283:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:03.419108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:03.423389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873020143571174:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.423506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.750788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T23:07:03.865946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873020143571296:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.866017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.866322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873020143571301:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:03.869382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T23:07:03.877687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873020143571303:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:07:04.650155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:07:05.221803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2024-11-21T23:07:05.862620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:07:06.387540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:07:06.994208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.619366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T23:07:07.693889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.776190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710723:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 5013, MsgBus: 8436 2024-11-21T23:07:13.036592Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873062160807290:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:13.036662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002cc0/r3tmp/tmpW5c6rZ/pdisk_1.dat 2024-11-21T23:07:13.245696Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:13.269921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:13.270012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:13.272290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5013, node 2 2024-11-21T23:07:13.387101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:13.387125Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:13.387133Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:13.387263Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8436 TClient is connected to server localhost:8436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:14.047624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:14.069854Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:17.308586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873079340677111:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:17.309013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:17.313456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T23:07:17.409981Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873079340677229:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:17.410060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:17.410468Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873079340677235:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:17.413882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T23:07:17.438058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873079340677237:2320], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:07:18.051742Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873062160807290:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:18.051801Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:18.253021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:07:18.795610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2024-11-21T23:07:19.515220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:07:20.329124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:07:20.964396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:07:21.541885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T23:07:21.608313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T23:07:26.459554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710723:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> TYardTest::TestEnormousDisk [GOOD] |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut >> TGRpcNewCoordinationClient::CreateDropDescribe |79.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |79.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.8%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] >> YdbScripting::MultiResults >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl >> YdbYqlClient::TestExplicitPartitioning |79.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestEnormousDisk [GOOD] |79.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/ydb-public-sdk-cpp-client-ydb_persqueue_public-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test command err: 2024-11-21T23:07:07.907953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873033607828235:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:07.908312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dfe/r3tmp/tmphR8ObQ/pdisk_1.dat 2024-11-21T23:07:08.557706Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:08.663979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:08.664069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:08.668332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24727, node 1 2024-11-21T23:07:09.087724Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:09.087749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:09.087758Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:09.087842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
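The ClickHouSE connector exchange logged a few lines above (Call DescribeTable, Call ListSplits, Call ReadSplits, each answered with GRpcStatusCode: 0) follows a three-step pattern: describe the external table's schema, enumerate splits for the select, then stream each split back as Arrow IPC. The sketch below only mirrors that sequence with plain Python dictionaries; ConnectorStub and its method names are invented placeholders, not the real generic-connector gRPC client used by the test, and the field values are copied from the logged request (password elided).

# Illustrative sketch only: mirrors the DescribeTable -> ListSplits -> ReadSplits
# exchange from the log above. `stub` is a hypothetical placeholder object,
# not the actual YDB generic connector gRPC client.

data_source = {
    "kind": "CLICKHOUSE",
    "endpoint": {"host": "rc1a-d6dv17lv47v5mcop.db.yandex.net", "port": 8443},
    "database": "pgdb",
    "credentials": {"basic": {"username": "crab", "password": "..."}},  # elided here
    "use_tls": True,
    "protocol": "HTTP",
}

def read_external_table(stub, table_name):
    # 1. Fetch the external table schema (col1: UINT16, col2: DOUBLE in the log).
    schema = stub.describe_table({"data_source_instance": data_source,
                                  "table": table_name})
    # 2. Ask the connector to partition the select into splits.
    splits = stub.list_splits({"selects": [{"data_source_instance": data_source,
                                            "from": {"table": table_name}}]})
    # 3. Stream every split back as Arrow IPC record batches.
    for split in splits:
        yield from stub.read_splits({"splits": [split],
                                     "format": "ARROW_IPC_STREAMING",
                                     "filtering": "FILTERING_OPTIONAL"})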
2024-11-21T23:07:09.726459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.749491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:09.758508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.763298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:09.763505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:09.763520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:07:09.766999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:09.767022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:09.769083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.770175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:09.772896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230429818, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:09.772932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:09.773223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:09.774934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:09.775064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:09.775108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:09.775180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:09.775208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:09.775243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:09.779709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:09.779773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:09.779791Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:09.779877Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:11.983440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873050787698309:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:11.983525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:12.275587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.276195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:12.276861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:12.276884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.283475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:07:12.283694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:12.283938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:12.284030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:12.285898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.285934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.285951Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:12.286177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.286194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.286204Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:12.289086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:12.294449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:12.294539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:12.300109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:12.363432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:12.363461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:12.363550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T23:07:12.365566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:12.368739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230432415, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:12.368787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230432415 2024-11-21T23:07:12.368906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:12.378618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:12.378964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:12.379046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:12.385437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.385506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.385526Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: ... -21T23:07:29.828109Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.828143Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.828156Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T23:07:29.828281Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.828312Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.828324Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T23:07:29.828429Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.828447Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.828458Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T23:07:29.828558Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 
LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.828572Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.828582Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T23:07:29.833511Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T23:07:29.845630Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230449880, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:29.845682Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230449880, at schemeshard: 72057594046644480 2024-11-21T23:07:29.845817Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T23:07:29.845961Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230449880, at schemeshard: 72057594046644480 2024-11-21T23:07:29.846015Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T23:07:29.846077Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230449880, at schemeshard: 72057594046644480 2024-11-21T23:07:29.846116Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T23:07:29.846190Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732230449880 2024-11-21T23:07:29.846240Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T23:07:29.849838Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:29.850355Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:29.850434Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T23:07:29.850522Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T23:07:29.850705Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T23:07:29.850755Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T23:07:29.850834Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T23:07:29.850879Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T23:07:29.850949Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T23:07:29.850987Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T23:07:29.851012Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T23:07:29.851046Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976715659:1 2024-11-21T23:07:29.851064Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T23:07:29.851076Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T23:07:29.851094Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T23:07:29.855245Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.855285Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.855306Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:07:29.855527Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.855550Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.855560Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:07:29.855667Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.855683Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.855693Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:07:29.855794Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.855825Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.855837Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T23:07:29.855959Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:29.856014Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:29.856026Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T23:07:29.856064Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, 
txId: 281474976715659, subscribers: 1 2024-11-21T23:07:29.863770Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439873127613910988:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:07:29.963009Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:29.963197Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:07:29.965784Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:29.983210Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439873127613911073:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:29.984952Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MTZkYmQ2MzYtMTQxMTM3YjQtOTRlZmRjMzktYmM1NmEyMWY=, ActorId: [10:7439873127613910797:2297], ActorState: ExecuteState, TraceId: 01jd8fp2mce0mphcpceregpfft, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:30.032908Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7439873131908878394:2324], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:30.035418Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=MTZkYmQ2MzYtMTQxMTM3YjQtOTRlZmRjMzktYmM1NmEyMWY=, ActorId: [10:7439873127613910797:2297], ActorState: ExecuteState, TraceId: 01jd8fp2tv744x6t6pcdnzvvha, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD] Test command err: 2024-11-21T23:07:06.582640Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873030204677181:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:06.583026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e22/r3tmp/tmpffNmQ4/pdisk_1.dat 2024-11-21T23:07:07.349108Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:07.365842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:07.365941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:07.369517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28150, node 1 2024-11-21T23:07:07.587263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:07.587288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:07.587295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:07.587398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
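The two compile errors above come from YdbYqlClient::TestYqlLongSessionMultipleErrors: the same session (id MTZkYmQ2...) writes to Root/BadTable1 and then Root/BadTable2, and each attempt is rejected at compile time with SCHEME_ERROR / "Cannot find table", code 2003. A minimal sketch of that pattern is below; open_session and execute_query are hypothetical helpers standing in for the C++ SDK calls the real test uses, and the query text is an invented example, since the log does not show the actual statements.

# Sketch of the "long session, multiple errors" pattern seen in the log.
# `open_session` / `execute_query` are invented stand-ins; the real test is
# C++ and uses the YDB C++ SDK table client.

BAD_TABLES = ["Root/BadTable1", "Root/BadTable2"]

def exercise_long_session(open_session):
    session = open_session()          # one session reused for every statement
    for table in BAD_TABLES:
        status = session.execute_query(
            f"UPSERT INTO `{table}` (Key, Value) VALUES (1, 'x');")
        # Each attempt should fail at compile time with SCHEME_ERROR,
        # as in the two KQP_COMPILE_ACTOR errors above, without killing the session.
        assert status.code == "SCHEME_ERROR", status
    return session  # presumably still usable for a correct query afterwards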
2024-11-21T23:07:07.943467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.950735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:07.950807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.954579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:07.954798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:07.954815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:07:07.958825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:07.958859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:07.961214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.962782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:07.965919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230428012, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:07.965972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:07.966355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:07.968458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.968658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.968710Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:07.968797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:07.968841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:07.968886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:07.976196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:07.976299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:07.976335Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:07.976448Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:10.652011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873047384547187:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:10.662549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:10.978887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:10.979386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusSchemeError, reason: Column Key has wrong key type Json, at schemeshard: 72057594046644480 2024-11-21T23:07:10.981898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusSchemeError, reason: Column Key has wrong key type Json, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:07:11.029646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873051679514517:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:11.029728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:11.039325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.039617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSchemeError, reason: Column Key has wrong key type Yson, at schemeshard: 72057594046644480 2024-11-21T23:07:11.043477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusSchemeError, reason: Column Key has wrong key type Yson, operation: CREATE TABLE, path: /Root/Test test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e22/r3tmp/tmpOxscLz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23252, node 4 TClient is connected to server localhost:14294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e22/r3tmp/tmpBgsGYc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19761, node 7 TClient is connected to server localhost:6802 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
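The two rejected CREATE TABLE proposals above (StatusSchemeError, "Column Key has wrong key type Json" and "... Yson") show schemeshard refusing primary key columns of those types. The check below is a rough reconstruction of that rule from this log only; the real validation in schemeshard covers more type combinations than the two demonstrated here.

# Minimal sketch of the key-type check implied by the rejections above.
# The forbidden set lists only what this log demonstrates.

KEY_TYPES_REJECTED_IN_LOG = {"Json", "Yson"}

def validate_primary_key(columns, primary_key):
    """columns: dict of column name -> type name; primary_key: list of names."""
    for name in primary_key:
        col_type = columns[name]
        if col_type in KEY_TYPES_REJECTED_IN_LOG:
            raise ValueError(f"Column {name} has wrong key type {col_type}")
    return True

# Mirrors the failing proposals, e.g.:
# validate_primary_key({"Key": "Json", "Value": "Utf8"}, ["Key"])  -> ValueError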
2024-11-21T23:07:24.158773Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439873108432411963:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:24.158834Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e22/r3tmp/tmp4H6HjN/pdisk_1.dat 2024-11-21T23:07:24.487349Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:24.527147Z node 10 :HIVE WARN: HIVE#72 ... known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:28.536839Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:28.537411Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:28.537512Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, ProgressState 2024-11-21T23:07:28.537715Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:28.537821Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:28.537916Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:28.539955Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.540030Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.540051Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:28.540305Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.540324Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.540336Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:28.540461Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.540476Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.540487Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T23:07:28.540603Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 
PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.540637Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.540649Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T23:07:28.540763Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.540778Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.540789Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T23:07:28.544887Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T23:07:28.551755Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230448592, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:28.551800Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230448592, at schemeshard: 72057594046644480 2024-11-21T23:07:28.551925Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T23:07:28.552041Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230448592, at schemeshard: 72057594046644480 2024-11-21T23:07:28.552088Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T23:07:28.552137Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230448592, at schemeshard: 72057594046644480 2024-11-21T23:07:28.552180Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T23:07:28.552233Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732230448592 2024-11-21T23:07:28.552280Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T23:07:28.558779Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:28.559371Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:28.559445Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T23:07:28.559542Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T23:07:28.559765Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T23:07:28.559812Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T23:07:28.559892Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T23:07:28.559930Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T23:07:28.559998Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T23:07:28.560032Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T23:07:28.560059Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T23:07:28.560092Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T23:07:28.560106Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T23:07:28.560115Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T23:07:28.560135Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T23:07:28.564945Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.565009Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.565029Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:28.565309Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.565331Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.565342Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T23:07:28.565475Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.565512Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.565525Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:07:28.565668Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.565687Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.565697Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:07:28.565818Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 
5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:28.565834Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:28.565844Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T23:07:28.565881Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T23:07:28.578598Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439873125612282226:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:07:28.664649Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:28.664835Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:07:28.668469Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e2f/r3tmp/tmpaRzjqi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14642, node 1 TClient is connected to server localhost:19166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
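The lines just above ("Scheduled retry for error: Transaction 281474976715658 completed, doublechecking" followed by a propose that returns StatusAlreadyExists with "path exist, request accepts it") show the workload service bootstrapping the default resource pool idempotently: the fetch fails with NOT_FOUND, a create is issued, and the double-check re-propose that finds the path already present still counts as success. A rough sketch of that ensure-exists loop, with invented helper names standing in for TPoolFetcherActor and TPoolCreatorActor:

# Rough sketch of the "ensure default pool exists" flow visible in the log.
# fetch_pool / propose_create are invented stand-ins for the actors named in
# the log; the status strings are the ones that actually appear above.

def ensure_default_pool(fetch_pool, propose_create, retries=3):
    if fetch_pool("default") != "NOT_FOUND":
        return "ok"                       # pool already there, nothing to do
    for _ in range(retries):
        status = propose_create("default")
        if status in ("StatusAccepted", "StatusAlreadyExists"):
            # "path exist, request accepts it": an earlier or concurrent create
            # finished first; the doublechecking pass treats this as success.
            return "ok"
        # otherwise retry, as in "Scheduled retry for error: Transaction ..."
    return "failed"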
2024-11-21T23:07:11.197711Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873054354456655:2146];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e2f/r3tmp/tmpk4CXDJ/pdisk_1.dat 2024-11-21T23:07:11.273828Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:11.343563Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:11.379535Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:11.379626Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:11.383996Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1751, node 4 2024-11-21T23:07:11.660902Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:11.660922Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:11.660937Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:11.661074Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:11.903373Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.903700Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:11.903714Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.907239Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:11.907432Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:11.907444Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T23:07:11.911901Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:11.914947Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:11.914971Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:11.924620Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.933844Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230431981, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:11.933876Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:11.934078Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:11.935799Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:11.935954Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:11.936006Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:11.936092Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:07:11.936138Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:07:11.936177Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:07:11.937793Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:07:11.937825Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:07:11.937841Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:11.937918Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T23:07:14.860391Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439873067239359459:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:14.860498Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:15.106868Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:15.107488Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T23:07:15.108145Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:15.108171Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:15.110816Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:07:15.111015Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:15.111232Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:15.111342Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:15.113545Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:15.113592Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:15.113613Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:15.113856Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:15.113871Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:15.113882Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:15.117466Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T23:07:15.127234Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:15.127342Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T23:07:15.147461Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:15.209997Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:15.210027Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:15.210141Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976715658:0 3 -> 128 2024-11-21T23:07:15.215360Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:15.223731Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230435264, transact ... ode 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:26.572186Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:26.572561Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:26.572593Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:26.574221Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:26.574375Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:26.574421Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T23:07:26.579239Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:26.579266Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:26.580162Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:26.580795Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:26.585919Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230446632, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:26.585957Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:26.586218Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:26.590016Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:26.590292Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:26.590351Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:26.590462Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:07:26.590513Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:07:26.590559Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:07:26.591185Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:07:26.591232Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:07:26.591250Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:26.591319Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T23:07:30.205494Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.205940Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T23:07:30.206588Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:30.206623Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.210890Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
2024-11-21T23:07:30.211158Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:30.211405Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:30.211481Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:30.214990Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:30.215060Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:30.215085Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:30.215415Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:30.215438Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:30.215452Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:30.215755Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T23:07:30.226882Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:30.226993Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T23:07:30.229870Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:30.310583Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:30.310622Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:30.310696Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T23:07:30.323428Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:30.340084Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230450377, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:30.340146Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230450377 2024-11-21T23:07:30.340306Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T23:07:30.356316Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:30.356688Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T23:07:30.356753Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:30.359754Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732230450377 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 16 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1410 } } 2024-11-21T23:07:30.360347Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:30.360376Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:30.360394Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:30.360575Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:30.360591Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:30.360601Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:30.362183Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:07:30.362216Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.362240Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T23:07:30.372494Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T23:07:30.372610Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T23:07:30.372671Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] Test command err: 2024-11-21T23:07:08.486262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873037725743037:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:08.486314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dfa/r3tmp/tmp5BcarF/pdisk_1.dat 2024-11-21T23:07:09.169225Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:09.217476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:09.217737Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:09.233152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4285, node 1 2024-11-21T23:07:09.422361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:09.422429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:09.422437Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:09.422559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:09.795722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.810599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:09.810698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.814435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
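Aside: the "Test command err" block around here belongs to YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning, which exercises explicit split points combined with automatic partitioning. Purely as a hedged illustration (the table path matches the log, but the column set, split keys and size threshold below are assumptions, not the DDL the test actually issues), a table with both settings can be declared in YQL roughly like this:

    CREATE TABLE `/Root/Test` (
        Key Uint64,
        Value Utf8,
        PRIMARY KEY (Key)
    )
    WITH (
        -- explicit initial split points (hypothetical values)
        PARTITION_AT_KEYS = (100, 1000, 10000),
        -- allow the server to keep splitting shards as they grow
        AUTO_PARTITIONING_BY_SIZE = ENABLED,
        AUTO_PARTITIONING_PARTITION_SIZE_MB = 2048
    );

Splitting at N keys yields N+1 initial shards, which is consistent with the later part of this block where one CREATE TABLE produces three TEvCreateTabletReply / TEvProposeTransactionResult entries from tablets 72075186224037888-890.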
2024-11-21T23:07:09.814719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:09.814734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:09.818263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:09.818300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:09.821408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.824847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:09.826576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230429874, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:09.826624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:09.826948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:09.828836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:09.829055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:09.829112Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:09.829225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:09.829273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:09.829314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:09.831577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:09.831626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:09.831663Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:09.831744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:12.111738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.112228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:12.112610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:12.112625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.112733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/Test/uid, operationId: 281474976710658:1, transaction: WorkingDir: "/Root/Test" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "uid" KeyColumnNames: "uid" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2024-11-21T23:07:12.112830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:12.112864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test/uid/indexImplTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T23:07:12.113024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 1 -> 2 2024-11-21T23:07:12.113183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:12.119910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/Test 2024-11-21T23:07:12.120098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:12.120865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:12.120931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTableIndex TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:12.121136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:2 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:12.121308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:12.121503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:12.123303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.123331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.123350Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:12.123575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.123591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.123612Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:12.123723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.123736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.123744Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T23:07:12.123829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.123841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.123848Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2024-11-21T23:07:12.141099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:12.141178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 2 -> 3 2024-11-21T23:07:12.141460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024- ... ogressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:27.043698Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T23:07:27.047806Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:27.047838Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:27.052785Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:27.061434Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:27.075555Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230447115, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:27.075599Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:27.075888Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:27.083361Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:27.083557Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:27.083629Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:27.083719Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:07:27.083762Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:07:27.083815Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 
281474976715657, publications: 1, subscribers: 1 2024-11-21T23:07:27.085204Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:07:27.085243Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:07:27.085263Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:27.085337Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T23:07:30.541525Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.542023Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T23:07:30.542642Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:30.542677Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.548635Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:07:30.548876Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:30.549111Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:30.549177Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:30.551171Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:30.551218Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:30.551234Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:30.551457Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:30.551472Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:30.551483Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:30.557186Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T23:07:30.562183Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 
281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:30.562526Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:30.562663Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:30.562693Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T23:07:30.572100Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:30.658254Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:30.658297Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:30.658679Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:30.658701Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:30.658859Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:30.658869Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:30.658893Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T23:07:30.663910Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:30.677650Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230450713, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:30.677710Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230450713 2024-11-21T23:07:30.677855Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T23:07:30.683406Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:30.683743Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:30.683805Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:30.693598Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:30.693657Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:30.693676Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:30.693938Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 
72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:30.693955Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:30.693966Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:30.712673Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715658 Step: 1732230450713 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 951 } } 2024-11-21T23:07:30.713028Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715658 Step: 1732230450713 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 13 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 1228 } } 2024-11-21T23:07:30.713196Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732230450713 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 31 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1261 } } 2024-11-21T23:07:30.715409Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:07:30.715630Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:07:30.715741Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:07:30.715760Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.715795Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T23:07:30.719258Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T23:07:30.719371Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T23:07:30.719422Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 >> TYqlDecimalTests::NegativeValues [GOOD] >> TYqlDecimalTests::DecimalKey >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> TGRpcYdbTest::DropTableBadRequest >> TGRpcNewClient::YqlQueryWithParams [GOOD] >> TGRpcNewClient::YqlExplainDataQuery >> TGRpcYdbTest::BeginTxRequestError [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> Backpressure::MonteCarlo 
[GOOD] >> TTableProfileTests::UseDefaultProfile >> YdbYqlClient::CreateTableWithPartitionAtKeys >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial >> YdbTableBulkUpsert::ValidRetry >> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD] >> TGRpcNewCoordinationClient::CreateAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2024-11-21T23:07:06.354954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873031271855283:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:06.355014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e14/r3tmp/tmpiwmbth/pdisk_1.dat 2024-11-21T23:07:06.971571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:06.971681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:06.994852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:07.004152Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27646, node 1 2024-11-21T23:07:07.015242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2024-11-21T23:07:07.026834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:07:07.038709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:07.038954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:07:07.057452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.063014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.063057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.063142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:07:07.077368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.307382Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:07.355015Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:07.355050Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:07.355058Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:07.355153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15363 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:07.758897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.764806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:07.764855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.771872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:07.772082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:07.772099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
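Aside: the scheduler lines a little earlier in this section queue YdbYqlClient::TestDecimal35 and TYqlDecimalTests::DecimalKey, i.e. tests that read and write Decimal values, including as a primary-key column. As a hedged sketch only (table name, column names and the literal are assumptions, not taken from the tests), such a schema and a parameterized upsert look roughly like this in YQL:

    CREATE TABLE `/Root/DecimalTest` (
        Key Decimal(22,9),      -- Decimal as the primary key, as the DecimalKey test name suggests
        Value Decimal(22,9),
        PRIMARY KEY (Key)
    );

    DECLARE $key AS Decimal(22,9);
    DECLARE $value AS Decimal(22,9);
    UPSERT INTO `/Root/DecimalTest` (Key, Value)
    VALUES ($key, $value);

A literal can also be written inline as Decimal("123.456789", 22, 9); negative values (cf. TYqlDecimalTests::NegativeValues) use the same form with a leading minus sign.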
2024-11-21T23:07:07.778778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:07.783314Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:07.783345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:07.791221Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.803898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230427837, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:07.803961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:07.804319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:07.811413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.811651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.811716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:07.811811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:07.811863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:07.811925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:07.816008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:07.816073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:07.816095Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:07.816215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:11.826721Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873050878526484:2086];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:11.826776Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e14/r3tmp/tmpp9WF7o/pdisk_1.dat 2024-11-21T23:07:12.044186Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:12.073355Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:12.073431Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:12.076883Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27614, node 4 2024-11-21T23:07:12.162174Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:12.162195Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:12.162201Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:12.162286Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:12.380546Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.380868Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:12.380886Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.385525Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:12.385716Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:12.385731Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:07:12.386815Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:12.387305Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:12.387331Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:12.390926Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710 ... 
94046644480 2024-11-21T23:07:32.765867Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.765925Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.765947Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:32.766265Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.766311Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.766330Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:32.766536Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.766564Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.766578Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T23:07:32.766731Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.766747Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.766758Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T23:07:32.766881Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.766896Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.766907Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T23:07:32.767510Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T23:07:32.783573Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230452820, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:32.783650Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230452820, at schemeshard: 72057594046644480 2024-11-21T23:07:32.783827Z node 13 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 281474976715658:0 128 -> 240 2024-11-21T23:07:32.784023Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230452820, at schemeshard: 72057594046644480 2024-11-21T23:07:32.784092Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:1 128 -> 240 2024-11-21T23:07:32.784171Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230452820, at schemeshard: 72057594046644480 2024-11-21T23:07:32.784250Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:2 128 -> 240 2024-11-21T23:07:32.784391Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715658:3, HandleReply TEvOperationPlan: step# 1732230452820 2024-11-21T23:07:32.784487Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:3 128 -> 240 2024-11-21T23:07:32.788865Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:32.789533Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:32.789620Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:3 ProgressState 2024-11-21T23:07:32.789747Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:3 progress is 1/4 2024-11-21T23:07:32.789974Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:1 ProgressState 2024-11-21T23:07:32.790041Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:1 progress is 2/4 2024-11-21T23:07:32.790170Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T23:07:32.790221Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 3/4 2024-11-21T23:07:32.790319Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:2 ProgressState 2024-11-21T23:07:32.790367Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:2 progress is 4/4 2024-11-21T23:07:32.790418Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T23:07:32.790461Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:1 2024-11-21T23:07:32.790478Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:2 2024-11-21T23:07:32.790491Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:3 2024-11-21T23:07:32.790511Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715658, publications: 5, subscribers: 1 2024-11-21T23:07:32.792598Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.792643Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.792666Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 
2024-11-21T23:07:32.792895Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.792918Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.792934Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T23:07:32.793065Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.793087Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.793098Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:07:32.793228Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.793250Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.793263Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:07:32.793388Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:32.793406Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:32.793416Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T23:07:32.793455Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T23:07:32.797768Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439873140627531969:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:07:32.889473Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:32.889683Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:07:32.898765Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:32.904007Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZWZlOTViNjYtYzdmMjMyZS0zNzExMGJmNi1jOGYwNWVm, ActorId: [13:7439873140627531928:2295], ActorState: ExecuteState, TraceId: 01jd8fp5g81d83dp8qczc6zr3c, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2024-11-21T23:07:32.913473Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZWZlOTViNjYtYzdmMjMyZS0zNzExMGJmNi1jOGYwNWVm, ActorId: [13:7439873140627531928:2295], ActorState: ExecuteState, TraceId: 01jd8fp5nfa152dcbh1d3jtvee, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2024-11-21T23:07:32.916923Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=ZWZlOTViNjYtYzdmMjMyZS0zNzExMGJmNi1jOGYwNWVm, ActorId: [13:7439873140627531928:2295], ActorState: ExecuteState, TraceId: 01jd8fp5nm1s75pf6b2pt7x20p, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: >> YdbYqlClient::RetryOperationTemplate >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] >> YdbTableBulkUpsert::DataValidation [GOOD] >> YdbOlapStore::ManyTables >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut_client/unittest >> Backpressure::MonteCarlo [GOOD] Test command err: Clock# 1970-01-01T00:00:00.000000Z elapsed# 0.000142s EventsProcessed# 0 clients.size# 0 Clock# 1970-01-01T00:00:10.017321Z elapsed# 0.001230s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:21.667095Z elapsed# 0.001386s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:34.170091Z elapsed# 0.001740s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:48.784179Z elapsed# 0.001970s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:08.649041Z elapsed# 0.001992s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:27.103519Z elapsed# 0.002022s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:46.001929Z elapsed# 0.002047s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:01.598026Z elapsed# 0.002067s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:15.298539Z elapsed# 0.002085s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:31.358408Z elapsed# 0.002130s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:43.322538Z elapsed# 
0.002152s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:01.555107Z elapsed# 0.002172s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:12.418373Z elapsed# 0.002192s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:27.198148Z elapsed# 0.002219s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:37.559083Z elapsed# 0.002241s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:03:51.075429Z elapsed# 0.002265s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:04:04.859030Z elapsed# 0.002287s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:04:23.952038Z elapsed# 0.002306s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:04:41.193788Z elapsed# 0.002331s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:05:00.814767Z elapsed# 0.002355s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:05:11.524443Z elapsed# 0.002374s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:05:23.255878Z elapsed# 0.002392s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:05:39.005102Z elapsed# 0.002435s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:05:49.606841Z elapsed# 0.002460s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:06:05.863084Z elapsed# 0.002480s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:06:21.986379Z elapsed# 0.002503s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:06:39.331177Z elapsed# 0.002525s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:06:56.740772Z elapsed# 0.002543s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:07:09.286555Z elapsed# 0.002568s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:07:24.260306Z elapsed# 0.002600s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:07:40.073985Z elapsed# 0.002622s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:07:59.576358Z elapsed# 0.002640s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:08:14.970615Z elapsed# 0.002667s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:08:27.925886Z elapsed# 0.002691s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:08:47.454906Z elapsed# 0.002712s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:09:02.141491Z elapsed# 0.023104s EventsProcessed# 1715 clients.size# 1 Clock# 1970-01-01T00:09:16.826306Z elapsed# 0.084640s EventsProcessed# 3466 clients.size# 1 Clock# 1970-01-01T00:09:35.067162Z elapsed# 0.084879s EventsProcessed# 3468 clients.size# 0 Clock# 1970-01-01T00:09:50.653030Z elapsed# 0.084900s EventsProcessed# 3468 clients.size# 0 Clock# 1970-01-01T00:10:07.038645Z elapsed# 0.084919s EventsProcessed# 3468 clients.size# 0 Clock# 1970-01-01T00:10:17.769473Z elapsed# 0.084939s EventsProcessed# 3468 clients.size# 0 Clock# 1970-01-01T00:10:29.099836Z elapsed# 0.142388s EventsProcessed# 4835 clients.size# 1 Clock# 1970-01-01T00:10:43.745289Z elapsed# 0.220731s EventsProcessed# 6585 clients.size# 1 Clock# 1970-01-01T00:11:03.326344Z elapsed# 0.337277s EventsProcessed# 11235 clients.size# 2 Clock# 1970-01-01T00:11:18.225998Z elapsed# 0.410435s EventsProcessed# 14780 clients.size# 2 Clock# 1970-01-01T00:11:31.173256Z elapsed# 0.441101s EventsProcessed# 17704 clients.size# 2 Clock# 1970-01-01T00:11:51.029609Z elapsed# 0.507501s EventsProcessed# 25000 clients.size# 3 Clock# 1970-01-01T00:12:08.319005Z elapsed# 0.572691s EventsProcessed# 31058 clients.size# 3 Clock# 1970-01-01T00:12:28.287223Z elapsed# 0.675818s EventsProcessed# 38035 clients.size# 3 Clock# 1970-01-01T00:12:40.903039Z elapsed# 0.763359s EventsProcessed# 42589 
clients.size# 3 Clock# 1970-01-01T00:12:51.472809Z elapsed# 0.806552s EventsProcessed# 45034 clients.size# 2 Clock# 1970-01-01T00:13:03.007183Z elapsed# 0.868978s EventsProcessed# 47717 clients.size# 2 Clock# 1970-01-01T00:13:19.405827Z elapsed# 0.931861s EventsProcessed# 51573 clients.size# 2 Clock# 1970-01-01T00:13:33.115254Z elapsed# 0.960459s EventsProcessed# 53332 clients.size# 1 Clock# 1970-01-01T00:13:45.672155Z elapsed# 1.005974s EventsProcessed# 54808 clients.size# 1 Clock# 1970-01-01T00:13:57.688142Z elapsed# 1.006146s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:14:14.417742Z elapsed# 1.006167s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:14:29.531738Z elapsed# 1.006193s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:14:45.452130Z elapsed# 1.006229s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:15:01.832645Z elapsed# 1.006250s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:15:20.085036Z elapsed# 1.006271s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:15:30.226541Z elapsed# 1.006289s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:15:45.072027Z elapsed# 1.006306s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:16:00.639804Z elapsed# 1.006324s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:16:14.100196Z elapsed# 1.006344s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:16:29.650474Z elapsed# 1.006360s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:16:43.732844Z elapsed# 1.006379s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:17:00.007809Z elapsed# 1.006400s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:17:13.585475Z elapsed# 1.006418s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:17:33.248277Z elapsed# 1.006437s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:17:48.178191Z elapsed# 1.006459s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:18:03.185215Z elapsed# 1.006476s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:18:23.145379Z elapsed# 1.006493s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:18:39.238659Z elapsed# 1.006512s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:18:54.362538Z elapsed# 1.006531s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:19:06.170937Z elapsed# 1.006551s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:19:21.181720Z elapsed# 1.006569s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:19:33.341685Z elapsed# 1.006590s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:19:46.166697Z elapsed# 1.006609s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:19:59.155097Z elapsed# 1.006632s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:20:18.762305Z elapsed# 1.006652s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:20:32.854846Z elapsed# 1.006674s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:20:46.569471Z elapsed# 1.006694s EventsProcessed# 54810 clients.size# 0 Clock# 1970-01-01T00:21:03.949003Z elapsed# 1.037145s EventsProcessed# 56855 clients.size# 1 Clock# 1970-01-01T00:21:19.320442Z elapsed# 1.055384s EventsProcessed# 58683 clients.size# 1 Clock# 1970-01-01T00:21:34.250235Z elapsed# 1.072747s EventsProcessed# 60483 clients.size# 1 Clock# 1970-01-01T00:21:45.030145Z elapsed# 1.102404s EventsProcessed# 62960 clients.size# 2 Clock# 1970-01-01T00:22:03.671086Z elapsed# 1.143895s EventsProcessed# 67325 clients.size# 2 
Clock# 1970-01-01T00:22:15.357328Z elapsed# 1.210269s EventsProcessed# 70141 clients.size# 2 Clock# 1970-01-01T00:22:31.837280Z elapsed# 1.338436s EventsProcessed# 74031 clients.size# 2 Clock# 1970-01-01T00:22:50.833467Z elapsed# 1.431116s EventsProcessed# 78502 clients.size# 2 Clock# 1970-01-01T00:23:00.978677Z elapsed# 1.494597s EventsProcessed# 82179 clients.size# 3 Clock# 1970-01-01T00:23:19.766457Z elapsed# 1.654272s EventsProcessed# 88885 clients.size# 3 Clock# 1970-01-01T00:23:39.147372Z elapsed# 1.849741s EventsProcessed# 95748 clients.size# 3 Clock# 1970-01-01T00:23:50.023442Z elapsed# 1.909169s EventsProcessed# 100826 clients.size# 4 Clock# 1970-01-01T00:24:00.632090Z elapsed# 1.950235s EventsProcessed# 104676 clients.size# 3 Clock# 1970-01-01T00:24:15.564774Z elapsed# 2.034881s EventsProcessed# 109915 clients.size# 3 Clock# 1970-01-01T00:24:34.210564Z elapsed# 2.128901s EventsProcessed# 116613 clients.size# 3 Clock# 1970-01-01T00:24:46.472725Z elapsed# 2.206653s EventsProcessed# 120811 clients.size# 3 Clock# 1970-01-01T00:25:00.022885Z elapsed# 2.341975s EventsProcessed# 125650 clients.size# 3 Clock# 1970-01-01T00:25:15.646894Z elapsed# 2.502198s EventsProcessed# 131224 clients.size# 3 Clock# 1970-01-01T00:25:30.384113Z elapsed# 2.585321s EventsProcessed# 136466 clients.size# 3 Clock# 1970-01-01T00:25:48.324587Z elapsed# 2.727704s EventsProcessed# 142847 clients.size# 3 Clock# 1970-01-01T00:26:04.834543Z elapsed# 2.899383s EventsProcessed# 148650 clients.size# 3 Clock# 1970-01-01T00:26:23.685415Z elapsed# 3.025246s EventsProcessed# 155343 clients.size# 3 Clock# 1970-01-01T00:26:33.837662Z elapsed# 3.114458s EventsProcessed# 158999 clients.size# 3 Clock# 1970-01-01T00:26:46.430089Z elapsed# 3.206405s EventsProcessed# 161982 clients.size# 2 Clock# 1970-01-01T00:26:59.359318Z elapsed# 3.264045s EventsProcessed# 165054 clients.size# 2 Clock# 1970-01-01T00:27:11.771261Z elapsed# 3.294546s EventsProcessed# 167962 clients.size# 2 Clock# 1970-01-01T00:27:24.328314Z elapsed# 3.353393s EventsProcessed# 171012 clients.size# 2 Clock# 1970-01-01T00:27:41.568342Z elapsed# 3.466967s EventsProcessed# 175173 clients.size# 2 Clock# 1970-01-01T00:27:59.472904Z elapsed# 3.609644s EventsProcessed# 179603 clients.size# 2 Clock# 1970-01-01T00:28:18.904748Z elapsed# 3.659493s EventsProcessed# 181906 clients.size# 1 Clock# 1970-01-01T00:28:30.511006Z elapsed# 3.659607s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:28:42.238762Z elapsed# 3.659628s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:29:00.507823Z elapsed# 3.659648s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:29:13.377438Z elapsed# 3.659663s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:29:25.903884Z elapsed# 3.659797s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:29:38.305031Z elapsed# 3.659814s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:29:57.259761Z elapsed# 3.659829s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:30:09.989145Z elapsed# 3.659843s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:30:28.533200Z elapsed# 3.659857s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:30:47.851652Z elapsed# 3.659868s EventsProcessed# 181908 clients.size# 0 Clock# 1970-01-01T00:31:00.669175Z elapsed# 3.706302s EventsProcessed# 183486 clients.size# 1 Clock# 1970-01-01T00:31:15.891825Z elapsed# 3.749408s EventsProcessed# 185412 clients.size# 1 Clock# 1970-01-01T00:31:33.103544Z elapsed# 3.749591s EventsProcessed# 185414 
clients.size# 0 Clock# 1970-01-01T00:31:48.153021Z elapsed# 3.749670s EventsProcessed# 185414 clients.size# 0 Clock# 1970-01-01T00:32:02.797717Z elapsed# 3.805827s EventsProcessed# 187176 clients.size# 1 Clock# 1970-01-01T00:32:14.579201Z elapsed# 3.851760s EventsProcessed# 188665 clients.size# 1 Clock# 1970-01-01T00:32:31. ... EventsProcessed# 14432542 clients.size# 9 Clock# 1970-01-01T05:29:55.355304Z elapsed# 240.586212s EventsProcessed# 14449658 clients.size# 9 Clock# 1970-01-01T05:30:12.491134Z elapsed# 240.795444s EventsProcessed# 14467787 clients.size# 9 Clock# 1970-01-01T05:30:27.336268Z elapsed# 241.071588s EventsProcessed# 14485158 clients.size# 10 Clock# 1970-01-01T05:30:39.120937Z elapsed# 241.247440s EventsProcessed# 14497581 clients.size# 9 Clock# 1970-01-01T05:30:57.479917Z elapsed# 241.579202s EventsProcessed# 14517440 clients.size# 9 Clock# 1970-01-01T05:31:11.003374Z elapsed# 241.772521s EventsProcessed# 14531877 clients.size# 9 Clock# 1970-01-01T05:31:30.429833Z elapsed# 242.171391s EventsProcessed# 14552651 clients.size# 9 Clock# 1970-01-01T05:31:40.810182Z elapsed# 242.345192s EventsProcessed# 14563846 clients.size# 9 Clock# 1970-01-01T05:31:53.790742Z elapsed# 242.538346s EventsProcessed# 14577636 clients.size# 9 Clock# 1970-01-01T05:32:08.479996Z elapsed# 242.761129s EventsProcessed# 14593212 clients.size# 9 Clock# 1970-01-01T05:32:27.963159Z elapsed# 243.075762s EventsProcessed# 14614436 clients.size# 9 Clock# 1970-01-01T05:32:45.913956Z elapsed# 243.354623s EventsProcessed# 14633267 clients.size# 9 Clock# 1970-01-01T05:33:00.356755Z elapsed# 243.548150s EventsProcessed# 14648744 clients.size# 9 Clock# 1970-01-01T05:33:19.775797Z elapsed# 243.960532s EventsProcessed# 14669292 clients.size# 9 Clock# 1970-01-01T05:33:36.318044Z elapsed# 244.190416s EventsProcessed# 14686906 clients.size# 9 Clock# 1970-01-01T05:33:49.958957Z elapsed# 244.420271s EventsProcessed# 14699887 clients.size# 8 Clock# 1970-01-01T05:34:04.292577Z elapsed# 244.604374s EventsProcessed# 14711960 clients.size# 7 Clock# 1970-01-01T05:34:22.716019Z elapsed# 244.793689s EventsProcessed# 14727345 clients.size# 7 Clock# 1970-01-01T05:34:32.850288Z elapsed# 244.881663s EventsProcessed# 14735719 clients.size# 7 Clock# 1970-01-01T05:34:43.084546Z elapsed# 244.966002s EventsProcessed# 14744124 clients.size# 7 Clock# 1970-01-01T05:34:58.202708Z elapsed# 245.090674s EventsProcessed# 14756613 clients.size# 7 Clock# 1970-01-01T05:35:12.793149Z elapsed# 245.226196s EventsProcessed# 14767024 clients.size# 6 Clock# 1970-01-01T05:35:23.814725Z elapsed# 245.291737s EventsProcessed# 14773595 clients.size# 5 Clock# 1970-01-01T05:35:40.747813Z elapsed# 245.396875s EventsProcessed# 14783416 clients.size# 5 Clock# 1970-01-01T05:35:57.371638Z elapsed# 245.494520s EventsProcessed# 14793470 clients.size# 5 Clock# 1970-01-01T05:36:09.604738Z elapsed# 245.595945s EventsProcessed# 14800612 clients.size# 5 Clock# 1970-01-01T05:36:20.231328Z elapsed# 245.661147s EventsProcessed# 14806916 clients.size# 5 Clock# 1970-01-01T05:36:38.407003Z elapsed# 245.752408s EventsProcessed# 14815652 clients.size# 4 Clock# 1970-01-01T05:36:52.435758Z elapsed# 245.819758s EventsProcessed# 14822191 clients.size# 4 Clock# 1970-01-01T05:37:11.749778Z elapsed# 245.917451s EventsProcessed# 14831509 clients.size# 4 Clock# 1970-01-01T05:37:25.265644Z elapsed# 246.021558s EventsProcessed# 14837968 clients.size# 4 Clock# 1970-01-01T05:37:42.507697Z elapsed# 246.083867s EventsProcessed# 14844149 clients.size# 3 Clock# 1970-01-01T05:37:56.857001Z elapsed# 
246.132547s EventsProcessed# 14849289 clients.size# 3 Clock# 1970-01-01T05:38:10.442475Z elapsed# 246.179148s EventsProcessed# 14853957 clients.size# 3 Clock# 1970-01-01T05:38:21.115611Z elapsed# 246.204782s EventsProcessed# 14856444 clients.size# 2 Clock# 1970-01-01T05:38:33.362574Z elapsed# 246.233361s EventsProcessed# 14859362 clients.size# 2 Clock# 1970-01-01T05:38:45.686117Z elapsed# 246.262155s EventsProcessed# 14862256 clients.size# 2 Clock# 1970-01-01T05:39:02.918729Z elapsed# 246.303288s EventsProcessed# 14866392 clients.size# 2 Clock# 1970-01-01T05:39:14.909738Z elapsed# 246.332598s EventsProcessed# 14869384 clients.size# 2 Clock# 1970-01-01T05:39:30.959066Z elapsed# 246.401490s EventsProcessed# 14873114 clients.size# 2 Clock# 1970-01-01T05:39:45.067103Z elapsed# 246.435813s EventsProcessed# 14876454 clients.size# 2 Clock# 1970-01-01T05:40:03.640466Z elapsed# 246.525666s EventsProcessed# 14880824 clients.size# 2 Clock# 1970-01-01T05:40:18.250054Z elapsed# 246.559802s EventsProcessed# 14884283 clients.size# 2 Clock# 1970-01-01T05:40:34.455547Z elapsed# 246.577756s EventsProcessed# 14886234 clients.size# 1 Clock# 1970-01-01T05:40:46.538371Z elapsed# 246.592683s EventsProcessed# 14887709 clients.size# 1 Clock# 1970-01-01T05:40:59.893777Z elapsed# 246.612148s EventsProcessed# 14889318 clients.size# 1 Clock# 1970-01-01T05:41:13.186516Z elapsed# 246.629271s EventsProcessed# 14890959 clients.size# 1 Clock# 1970-01-01T05:41:23.256391Z elapsed# 246.642522s EventsProcessed# 14892135 clients.size# 1 Clock# 1970-01-01T05:41:37.546092Z elapsed# 246.671536s EventsProcessed# 14893809 clients.size# 1 Clock# 1970-01-01T05:41:53.520588Z elapsed# 246.699179s EventsProcessed# 14895659 clients.size# 1 Clock# 1970-01-01T05:42:03.592525Z elapsed# 246.711629s EventsProcessed# 14896841 clients.size# 1 Clock# 1970-01-01T05:42:16.765606Z elapsed# 246.732471s EventsProcessed# 14898433 clients.size# 1 Clock# 1970-01-01T05:42:31.380130Z elapsed# 246.756697s EventsProcessed# 14900220 clients.size# 1 Clock# 1970-01-01T05:42:48.874610Z elapsed# 246.779141s EventsProcessed# 14902337 clients.size# 1 Clock# 1970-01-01T05:43:04.196466Z elapsed# 246.800327s EventsProcessed# 14904145 clients.size# 1 Clock# 1970-01-01T05:43:14.819026Z elapsed# 246.850153s EventsProcessed# 14905408 clients.size# 1 Clock# 1970-01-01T05:43:31.704128Z elapsed# 246.877296s EventsProcessed# 14907414 clients.size# 1 Clock# 1970-01-01T05:43:48.781486Z elapsed# 246.965285s EventsProcessed# 14911346 clients.size# 2 Clock# 1970-01-01T05:44:01.926607Z elapsed# 247.002159s EventsProcessed# 14914403 clients.size# 2 Clock# 1970-01-01T05:44:15.397181Z elapsed# 247.042205s EventsProcessed# 14917636 clients.size# 2 Clock# 1970-01-01T05:44:32.764894Z elapsed# 247.094530s EventsProcessed# 14921593 clients.size# 2 Clock# 1970-01-01T05:44:46.998990Z elapsed# 247.137215s EventsProcessed# 14924876 clients.size# 2 Clock# 1970-01-01T05:45:01.234315Z elapsed# 247.227240s EventsProcessed# 14928195 clients.size# 2 Clock# 1970-01-01T05:45:13.215217Z elapsed# 247.266301s EventsProcessed# 14930991 clients.size# 2 Clock# 1970-01-01T05:45:24.294590Z elapsed# 247.312139s EventsProcessed# 14933540 clients.size# 2 Clock# 1970-01-01T05:45:42.035377Z elapsed# 247.367896s EventsProcessed# 14937756 clients.size# 2 Clock# 1970-01-01T05:46:01.421717Z elapsed# 247.411008s EventsProcessed# 14942387 clients.size# 2 Clock# 1970-01-01T05:46:16.268486Z elapsed# 247.472261s EventsProcessed# 14945833 clients.size# 2 Clock# 1970-01-01T05:46:35.721039Z elapsed# 247.517796s 
EventsProcessed# 14950276 clients.size# 2 Clock# 1970-01-01T05:46:55.045906Z elapsed# 247.563339s EventsProcessed# 14954908 clients.size# 2 Clock# 1970-01-01T05:47:10.698616Z elapsed# 247.598341s EventsProcessed# 14958522 clients.size# 2 Clock# 1970-01-01T05:47:27.984885Z elapsed# 247.637537s EventsProcessed# 14962541 clients.size# 2 Clock# 1970-01-01T05:47:40.293894Z elapsed# 247.665496s EventsProcessed# 14965430 clients.size# 2 Clock# 1970-01-01T05:47:53.341308Z elapsed# 247.701474s EventsProcessed# 14968466 clients.size# 2 Clock# 1970-01-01T05:48:05.030225Z elapsed# 247.729901s EventsProcessed# 14971268 clients.size# 2 Clock# 1970-01-01T05:48:16.965461Z elapsed# 247.758645s EventsProcessed# 14974302 clients.size# 2 Clock# 1970-01-01T05:48:33.057660Z elapsed# 247.794778s EventsProcessed# 14978103 clients.size# 2 Clock# 1970-01-01T05:48:44.741245Z elapsed# 247.815970s EventsProcessed# 14980737 clients.size# 2 Clock# 1970-01-01T05:48:59.214035Z elapsed# 247.869615s EventsProcessed# 14984140 clients.size# 2 Clock# 1970-01-01T05:49:15.064618Z elapsed# 247.896192s EventsProcessed# 14987896 clients.size# 2 Clock# 1970-01-01T05:49:25.873010Z elapsed# 247.918686s EventsProcessed# 14990493 clients.size# 2 Clock# 1970-01-01T05:49:37.856020Z elapsed# 247.944887s EventsProcessed# 14993375 clients.size# 2 Clock# 1970-01-01T05:49:56.086405Z elapsed# 247.983255s EventsProcessed# 14997710 clients.size# 2 Clock# 1970-01-01T05:50:11.171029Z elapsed# 248.037288s EventsProcessed# 15002982 clients.size# 3 Clock# 1970-01-01T05:50:28.229978Z elapsed# 248.099970s EventsProcessed# 15009146 clients.size# 3 Clock# 1970-01-01T05:50:41.221226Z elapsed# 248.132482s EventsProcessed# 15012367 clients.size# 2 Clock# 1970-01-01T05:50:54.711972Z elapsed# 248.164955s EventsProcessed# 15015557 clients.size# 2 Clock# 1970-01-01T05:51:12.087344Z elapsed# 248.186521s EventsProcessed# 15017649 clients.size# 1 Clock# 1970-01-01T05:51:31.783229Z elapsed# 248.186682s EventsProcessed# 15017651 clients.size# 0 Clock# 1970-01-01T05:51:49.344740Z elapsed# 248.186708s EventsProcessed# 15017651 clients.size# 0 Clock# 1970-01-01T05:52:04.094808Z elapsed# 248.235348s EventsProcessed# 15019329 clients.size# 1 Clock# 1970-01-01T05:52:17.394511Z elapsed# 248.251000s EventsProcessed# 15020854 clients.size# 1 Clock# 1970-01-01T05:52:29.768514Z elapsed# 248.265528s EventsProcessed# 15022288 clients.size# 1 Clock# 1970-01-01T05:52:44.100122Z elapsed# 248.284404s EventsProcessed# 15024024 clients.size# 1 Clock# 1970-01-01T05:53:00.982928Z elapsed# 248.304825s EventsProcessed# 15026030 clients.size# 1 Clock# 1970-01-01T05:53:18.486699Z elapsed# 248.347614s EventsProcessed# 15030243 clients.size# 2 Clock# 1970-01-01T05:53:34.237547Z elapsed# 248.385800s EventsProcessed# 15034054 clients.size# 2 Clock# 1970-01-01T05:53:50.335950Z elapsed# 248.424186s EventsProcessed# 15037818 clients.size# 2 Clock# 1970-01-01T05:54:09.860042Z elapsed# 248.469517s EventsProcessed# 15042345 clients.size# 2 Clock# 1970-01-01T05:54:20.440294Z elapsed# 248.495946s EventsProcessed# 15044971 clients.size# 2 Clock# 1970-01-01T05:54:37.619291Z elapsed# 248.538329s EventsProcessed# 15049163 clients.size# 2 Clock# 1970-01-01T05:54:53.759163Z elapsed# 248.577284s EventsProcessed# 15053060 clients.size# 2 Clock# 1970-01-01T05:55:04.470351Z elapsed# 248.634966s EventsProcessed# 15055568 clients.size# 2 Clock# 1970-01-01T05:55:20.442522Z elapsed# 248.670586s EventsProcessed# 15059331 clients.size# 2 Clock# 1970-01-01T05:55:38.851591Z elapsed# 248.711825s EventsProcessed# 15063504 
clients.size# 2 Clock# 1970-01-01T05:55:51.065253Z elapsed# 248.735127s EventsProcessed# 15066484 clients.size# 2 Clock# 1970-01-01T05:56:06.944872Z elapsed# 248.769633s EventsProcessed# 15070205 clients.size# 2 Clock# 1970-01-01T05:56:26.681162Z elapsed# 248.817444s EventsProcessed# 15074858 clients.size# 2 Clock# 1970-01-01T05:56:39.366393Z elapsed# 248.847299s EventsProcessed# 15077774 clients.size# 2 Clock# 1970-01-01T05:56:51.024242Z elapsed# 248.875306s EventsProcessed# 15080563 clients.size# 2 Clock# 1970-01-01T05:57:06.512350Z elapsed# 248.913489s EventsProcessed# 15084141 clients.size# 2 Clock# 1970-01-01T05:57:25.397604Z elapsed# 248.957226s EventsProcessed# 15088704 clients.size# 2 Clock# 1970-01-01T05:57:45.229792Z elapsed# 249.038287s EventsProcessed# 15093399 clients.size# 2 Clock# 1970-01-01T05:58:05.048436Z elapsed# 249.089065s EventsProcessed# 15098179 clients.size# 2 Clock# 1970-01-01T05:58:16.661664Z elapsed# 249.118226s EventsProcessed# 15100954 clients.size# 2 Clock# 1970-01-01T05:58:28.849230Z elapsed# 249.148588s EventsProcessed# 15103848 clients.size# 2 Clock# 1970-01-01T05:58:45.211546Z elapsed# 249.188624s EventsProcessed# 15107710 clients.size# 2 Clock# 1970-01-01T05:59:01.624754Z elapsed# 249.228647s EventsProcessed# 15111574 clients.size# 2 Clock# 1970-01-01T05:59:16.833105Z elapsed# 249.266806s EventsProcessed# 15115230 clients.size# 2 Clock# 1970-01-01T05:59:29.457848Z elapsed# 249.298374s EventsProcessed# 15118273 clients.size# 2 Clock# 1970-01-01T05:59:47.746909Z elapsed# 249.342968s EventsProcessed# 15122509 clients.size# 2 Clock# 1970-01-01T05:59:59.912696Z elapsed# 249.372674s EventsProcessed# 15125362 clients.size# 2 >> YdbS3Internal::BadRequests [GOOD] >> YdbScripting::BasicV0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] Test command err: 2024-11-21T23:07:05.718302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873025240517639:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:05.718361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e28/r3tmp/tmpesKRRn/pdisk_1.dat 2024-11-21T23:07:06.689165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:06.689318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:06.712029Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:06.721974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:06.723863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 11461, node 1 2024-11-21T23:07:06.904364Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:06.904386Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:06.904393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:06.904484Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:12463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:07.239816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.246466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:07.246549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.255153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:07.255381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:07.255408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:07.258256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:07.258291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:07.262433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:07.263004Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.276237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230427312, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:07.276349Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:07.276685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:07.278916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.279111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.279171Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:07.279257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:07.279336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:07.279403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:07.282231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:07.282312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:07.282352Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:07.284084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:07.435812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.436041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:07.437976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:07.438013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.443476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T23:07:07.443668Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.443890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.443947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T23:07:07.445631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:07.445672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:07.445695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:07.445936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:07.445971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:07.445995Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:07.450129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:07.460308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:07.460414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:07.524105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:07.526022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:07.529991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230427578, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:07.530042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T23:07:07.530166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:07:07.532413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.532696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.532760Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:07.532826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:07:07.532870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:07.533015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 
2024-11-21T23:07:07.534339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:07.534369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:07.534384Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:07.534613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:07.534641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23: ... 24-11-21T23:07:29.993327Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:07:30.574995Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.575360Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:30.575393Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.583223Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:30.583424Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:30.583439Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:30.585947Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:30.585967Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:30.588259Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:30.592773Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230450636, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:30.592817Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:30.593112Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:30.599312Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:30.603507Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:30.603768Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:30.603832Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:30.603947Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:30.603995Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:30.604442Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:30.606039Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:30.606075Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:30.606095Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2024-11-21T23:07:30.606168Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:30.725556Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TheTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.726150Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:30.726841Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:30.726869Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.730020Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TheTable 2024-11-21T23:07:30.730284Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:30.730569Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:30.730702Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:30.731261Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:30.731294Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:30.731314Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:30.731556Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:30.731583Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:30.731598Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:30.734126Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:30.743426Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:30.743526Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:30.747645Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:30.839668Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:30.839702Z node 13 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult 
at tablet: 72057594046644480 2024-11-21T23:07:30.839803Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:30.846221Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:30.855567Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230450895, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:30.855660Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230450895 2024-11-21T23:07:30.855827Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:30.871166Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:30.871485Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:30.871554Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:30.874202Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:30.874263Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:30.874290Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:30.874614Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:30.874659Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:30.874693Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:30.958835Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710658 Step: 1732230450895 OrderId: 281474976710658 ExecLatency: 3 ProposeLatency: 23 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 6295 } } 2024-11-21T23:07:30.959197Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:07:30.959252Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.959278Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 129 -> 240 2024-11-21T23:07:30.961911Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:30.962078Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:07:30.962168Z node 
13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::DataValidation [GOOD] Test command err: 2024-11-21T23:07:08.158542Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873037579282323:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:08.158581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dfb/r3tmp/tmpali79q/pdisk_1.dat 2024-11-21T23:07:09.000053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:09.000145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:09.006779Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:09.007456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4209, node 1 2024-11-21T23:07:09.307078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:09.307108Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:09.307121Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:09.307259Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:07:09.723051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.751433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:09.751491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.770768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:09.770932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:09.770951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:07:09.775583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:09.779109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:09.779138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:09.787286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.807777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230429846, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:09.807829Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:09.808091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:09.811763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:09.811989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:09.812049Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:09.812139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:09.812180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:09.812249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:09.821485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:09.821554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:09.821573Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:09.821675Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:12.211064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestNotNullColumns, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.211727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:12.212395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:12.212429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.214790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestNotNullColumns 2024-11-21T23:07:12.214948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:12.215105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:12.215167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:12.216417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.216474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.216491Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:12.216813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.216844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.216856Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:12.218259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:12.225110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:12.225201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:12.231580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:12.300149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:12.300175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 
2024-11-21T23:07:12.300275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:12.302212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:12.305621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230432352, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:12.305675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230432352 2024-11-21T23:07:12.305796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:12.315258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:12.315639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:12.315697Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:12.318057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.318104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.318120Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:12.318317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.318333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.318358Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594 ... rocessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:30.751012Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.751380Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:30.751404Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.755537Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:30.755703Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:30.755713Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T23:07:30.761150Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:30.761193Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:30.762930Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:30.764397Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:30.766952Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230450811, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:30.766987Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:30.767268Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:30.769279Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:30.769457Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:30.769517Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:30.769614Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:07:30.769649Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:07:30.769688Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:07:30.771566Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:07:30.771627Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:07:30.771642Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:30.771740Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T23:07:34.195825Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestInvalidData, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.196636Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T23:07:34.197354Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:34.197379Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.203756Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: 
/Root/TestInvalidData 2024-11-21T23:07:34.203985Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:34.204222Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:34.204292Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:34.205890Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:34.205953Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:34.205975Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:34.206212Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:34.206244Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:34.206257Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:34.211736Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T23:07:34.236378Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:34.236475Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T23:07:34.244524Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:34.343867Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:34.343899Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:34.343978Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T23:07:34.348487Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:34.369392Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230454416, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:34.369451Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230454416 2024-11-21T23:07:34.369582Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 129 2024-11-21T23:07:34.375653Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:34.376025Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046644480 2024-11-21T23:07:34.376093Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:34.378139Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:34.378175Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:34.378195Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:34.382012Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:34.382067Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:34.382089Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:34.382466Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715658 Step: 1732230454416 OrderId: 281474976715658 ExecLatency: 0 ProposeLatency: 8 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1367 } } 2024-11-21T23:07:34.382686Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:07:34.382716Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.382742Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 129 -> 240 2024-11-21T23:07:34.391499Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T23:07:34.391618Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T23:07:34.391673Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:18079 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData'Invalid DyNumber string representation >> TFlatTableExecutorIndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_FlatIndex >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts >> YdbMonitoring::SelfCheck >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] >> TGRpcClientLowTest::ChangeAcl [GOOD] >> TGRpcYdbTest::DropTableBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryBadRequest >> YdbYqlClient::TestColumnOrder >> TFlatTableExecutorIndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_BTreeIndex >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |79.8%| [TM] {RESULT} ydb/core/blobstorage/backpressure/ut_client/unittest |79.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> YdbScripting::MultiResults [GOOD] >> YdbScripting::Params |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |79.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/ydb-public-sdk-cpp-client-ydb_persqueue_core-ut >> YdbYqlClient::TestDecimal35 [GOOD] >> YdbYqlClient::TestDecimalFullStack >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD] Test command err: 2024-11-21T23:07:10.996944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873049748109619:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:10.997021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001def/r3tmp/tmpBkAXJY/pdisk_1.dat 2024-11-21T23:07:11.547721Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:11.556943Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:11.557094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:11.574435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27433, node 1 2024-11-21T23:07:11.982877Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:11.982913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:11.982925Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:11.983087Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19780 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:12.363984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.382742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:12.382810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.387408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:12.387603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:12.387616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:12.389456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:12.389472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:12.390702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:07:12.393926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230432436, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:12.393971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:12.394228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:12.395089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:12.395901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:12.396050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:12.396088Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:12.396161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:12.396207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:12.396256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:12.401272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:12.401320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:12.401339Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:12.401437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2024-11-21T23:07:14.815950Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T23:07:14.845448Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2024-11-21T23:07:16.502236Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873071784532026:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:16.502286Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001def/r3tmp/tmpRwy1Kp/pdisk_1.dat 2024-11-21T23:07:16.716917Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:16.732920Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:16.732998Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:16.739837Z node 4 :HIVE WARN: HIVE#72057594037968897 
Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5259, node 4 2024-11-21T23:07:16.827067Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:16.827091Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:16.827102Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:16.827201Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:17.078960Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:17.079358Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:17.079378Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:17.083668Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:17.083831Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:17.083844Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:17.086196Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:17.086217Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T23:07:17.091062Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:17.098560Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:17.099098Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230437147, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:17.099122Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:17.099382Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:17.101250Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:17.101390Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:17.101433Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:17.101501Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is ... Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:34.461489Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.461827Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:34.461859Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:07:34.468974Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:34.469160Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:34.469175Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:34.479187Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:34.479220Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:34.479980Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:34.486150Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.498963Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230454542, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:34.499015Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:34.499297Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:34.501905Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:34.502069Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:34.502125Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:34.502243Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:34.502284Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:34.502342Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:34.504316Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:34.504368Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:34.504386Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:34.504463Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:34.573209Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/TheDirectory, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.573443Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:34.580164Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: 
txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/TheDirectory 2024-11-21T23:07:34.580425Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:34.580688Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:34.580757Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:34.581730Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:34.581771Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:34.581795Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:34.582047Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:34.582068Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:34.582079Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T23:07:34.583259Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:34.591562Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230454633, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:34.591616Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230454633, at schemeshard: 72057594046644480 2024-11-21T23:07:34.591769Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:07:34.595436Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:34.595676Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:34.595738Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:34.595834Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:07:34.595875Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:34.595933Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T23:07:34.597575Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:34.597613Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 
72057594046644480, txId: 281474976710658 2024-11-21T23:07:34.597629Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:34.597820Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:34.597836Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:34.597867Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:34.597910Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:07:34.622896Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/TheDirectory, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.623224Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:34.623248Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.623341Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:07:34.623434Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:07:34.623448Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 2, subscribers: 0 2024-11-21T23:07:34.626682Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/TheDirectory, set owner:qqq, add access: +R:qqq, add access: -():qqq:- 2024-11-21T23:07:34.626898Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:34.627263Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:34.628715Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:07:34.628777Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:07:34.628808Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T23:07:34.629079Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:07:34.629106Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:07:34.629117Z node 13 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:07:34.629155Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T23:07:40.265506Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.265530Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.265550Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:40.265995Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:07:40.266037Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.266055Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.267056Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006164s 2024-11-21T23:07:40.267788Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:40.268312Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:40.268389Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.269465Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.269484Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.269501Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:40.269853Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:07:40.269887Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.269911Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.269963Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009847s 2024-11-21T23:07:40.270306Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:40.270719Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:40.270763Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.271585Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.271603Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.271618Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:40.272220Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:07:40.272256Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.272271Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.272330Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.194439s 2024-11-21T23:07:40.272959Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:40.273529Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:40.273621Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.274619Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.274637Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.274653Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:40.275145Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:07:40.275179Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.275201Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.275252Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.172318s 2024-11-21T23:07:40.275706Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:40.276083Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:40.276138Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.281512Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.281556Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.281577Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:40.281881Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:40.282235Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:40.294160Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.295180Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T23:07:40.295216Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.295236Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.295289Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.279147s 2024-11-21T23:07:40.295473Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:07:40.296936Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.296963Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.296983Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:40.297366Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:40.297750Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:40.297951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.298456Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:40.400755Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.401026Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:40.401083Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:40.401121Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:07:40.401192Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:07:40.502352Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:07:40.502592Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:07:40.504029Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.504061Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.504079Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:40.504427Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:40.505043Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:40.506858Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.508656Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:40.615430Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:40.615617Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:40.615662Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:40.615708Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2024-11-21T23:07:40.615781Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T23:07:40.615863Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:07:40.616156Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:07:40.616240Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:07:40.616389Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2024-11-21T23:06:37.423175Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872906067379615:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:37.423463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002407/r3tmp/tmpLhe0eH/pdisk_1.dat 2024-11-21T23:06:38.576954Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:38.613962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:38.614066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:38.614648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:38.622877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21400, node 1 2024-11-21T23:06:38.958054Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:38.958087Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:38.958095Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:38.958223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:06:39.576981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:39.607015Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:06:39.679437Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:42.392181Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872906067379615:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:42.392305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:06:43.149552Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZGMzMGE4ODAtYjUyMWYwZDgtNGQwMThlMzktZjEyNDgwZmE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZGMzMGE4ODAtYjUyMWYwZDgtNGQwMThlMzktZjEyNDgwZmE= 2024-11-21T23:06:43.162758Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZGMzMGE4ODAtYjUyMWYwZDgtNGQwMThlMzktZjEyNDgwZmE=, ActorId: [1:7439872931837183874:2301], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:43.176565Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:43.176603Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was disabled 2024-11-21T23:06:43.227445Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM= 2024-11-21T23:06:43.227733Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:43.227912Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ReadyState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439872931837183875:2297] database: Root databaseId: /Root pool id: 2024-11-21T23:06:43.228004Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ReadyState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Sending CompileQuery request 2024-11-21T23:06:43.535355Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, ExecutePhyTx, tx: 0x000050C00002D1D8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T23:06:43.535416Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Sending to Executer TraceId: 0 8 
2024-11-21T23:06:43.535518Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Created new KQP executer: [1:7439872931837183882:2302] isRollback: 0 2024-11-21T23:06:43.581926Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Forwarded TEvStreamData to [1:7439872931837183875:2297] 2024-11-21T23:06:43.597701Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T23:06:43.598022Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, txInfo Status: Committed Kind: Pure TotalDuration: 62.786 ServerDuration: 62.669 QueriesCount: 2 2024-11-21T23:06:43.598103Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:06:43.598348Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:06:43.598375Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, EndCleanup, isFinal: 1 2024-11-21T23:06:43.598474Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: ExecuteState, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7439872906067379711:2256] 2024-11-21T23:06:43.598520Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: unknown state, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Cleanup temp tables: 0 2024-11-21T23:06:43.598773Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTQ5ODhiZGItNDBhMWU5MmUtNmYxMWM0MzctMTk0ZmRhYmM=, ActorId: [1:7439872931837183876:2302], ActorState: unknown state, TraceId: 01jd8fmn4ve5pwg3em57t5b466, Session actor destroyed 2024-11-21T23:06:43.613867Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZGMzMGE4ODAtYjUyMWYwZDgtNGQwMThlMzktZjEyNDgwZmE=, ActorId: [1:7439872931837183874:2301], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:06:43.613922Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZGMzMGE4ODAtYjUyMWYwZDgtNGQwMThlMzktZjEyNDgwZmE=, ActorId: [1:7439872931837183874:2301], ActorState: 
ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:06:43.613943Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZGMzMGE4ODAtYjUyMWYwZDgtNGQwMThlMzktZjEyNDgwZmE=, ActorId: [1:7439872931837183874:2301], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:06:43.613964Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZGMzMGE4ODAtYjUyMWYwZDgtNGQwMThlMzktZjEyNDgwZmE=, ActorId: [1:7439872931837183874:2301], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:06:43.614031Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZGMzMGE4ODAtYjUyMWYwZDgtNGQwMThlMzktZjEyNDgwZmE=, ActorId: [1:7439872931837183874:2301], ActorState: unknown state, Session actor destroyed 2024-11-21T23:06:45.293364Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439872939288029403:2128];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:45.294966Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002407/r3tmp/tmpoFCIvh/pdisk_1.dat 2024-11-21T23:06:45.719129Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:45.807755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:45.807834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:45.816881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14571, node 2 2024-11-21T23:06:46.107083Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:46.107105Z node 2 :NET_CL ... 
`.sys/nodes`; 2024-11-21T23:07:33.274211Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T23:07:33.274338Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ReadyState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, received request, proxyRequestId: 6 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpcActor: [8:7439873146269833128:2337] database: /Root databaseId: /Root pool id: default 2024-11-21T23:07:33.274377Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [8:7439873146269833127:2336], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY= 2024-11-21T23:07:33.274437Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439873146269833130:2338], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T23:07:33.274544Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7439873146269833131:2339], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, Start pool fetching 2024-11-21T23:07:33.274577Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439873146269833132:2340], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T23:07:33.275178Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439873146269833130:2338], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2024-11-21T23:07:33.275274Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439873146269833132:2340], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2024-11-21T23:07:33.275300Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2024-11-21T23:07:33.275353Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7439873146269833131:2339], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, Pool info successfully resolved 2024-11-21T23:07:33.275412Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY= 2024-11-21T23:07:33.275495Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439873137679898472:2321], DatabaseId: /Root, PoolId: default, Received new request, worker id: [8:7439873146269833127:2336], session id: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY= 2024-11-21T23:07:33.275548Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439873137679898472:2321], DatabaseId: /Root, PoolId: default, Reply continue success to [8:7439873146269833127:2336], session id: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, local in flight: 1 2024-11-21T23:07:33.275568Z node 8 
:KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY= 2024-11-21T23:07:33.275621Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, continue request, pool id: default 2024-11-21T23:07:33.275691Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, Sending CompileQuery request 2024-11-21T23:07:33.276002Z node 8 :KQP_SESSION INFO: Scheme error, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], status: PathNotTable 2024-11-21T23:07:34.215552Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, ExecutePhyTx, tx: 0x000050C0001435D8 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:07:34.215621Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, Sending to Executer TraceId: 0 8 2024-11-21T23:07:34.222520Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, Created new KQP executer: [8:7439873150564800458:2336] isRollback: 0 2024-11-21T23:07:34.252970Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T23:07:34.253074Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, ExecutePhyTx, tx: 0x000050C00044A718 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T23:07:34.253772Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:07:34.253881Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, txInfo Status: Committed Kind: ReadOnly TotalDuration: 38.452 ServerDuration: 38.369 QueriesCount: 2 2024-11-21T23:07:34.253976Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:07:34.254039Z node 8 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ExecuteState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2024-11-21T23:07:34.254262Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439873137679898472:2321], DatabaseId: /Root, PoolId: default, Received cleanup request, worker id: [8:7439873146269833127:2336], session id: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, duration: 0.979549s, cpu consumed: 0.008720s 2024-11-21T23:07:34.254290Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439873137679898472:2321], DatabaseId: /Root, PoolId: default, Reply cleanup success to [8:7439873146269833127:2336], session id: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, local in flight: 0 2024-11-21T23:07:34.254333Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: CleanupState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, EndCleanup, isFinal: 0 2024-11-21T23:07:34.254386Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: CleanupState, TraceId: 01jd8fp60t1a1x4nzskc1br2m4, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7439873099025191926:2256] 2024-11-21T23:07:34.254554Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: default, Duration: 0.979549s, CpuConsumed: 0.008720s, AdjustCpuQuota: 0 2024-11-21T23:07:34.255326Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, TxId: 2024-11-21T23:07:34.255465Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, TxId: 2024-11-21T23:07:34.255841Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:07:34.255899Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:07:34.255939Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:07:34.255982Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:07:34.256072Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTVmMWYzNDAtY2UzOGE4NjctNjc1MTUzMDktMWJlNjEzYzY=, ActorId: [8:7439873146269833127:2336], ActorState: unknown 
state, Session actor destroyed 2024-11-21T23:07:34.312100Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YTQyNzZkNTktZDViNzkxNmYtNmYyNzI3ZDAtN2M0MjE1Njc=, ActorId: [8:7439873133384931009:2310], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:07:34.312181Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YTQyNzZkNTktZDViNzkxNmYtNmYyNzI3ZDAtN2M0MjE1Njc=, ActorId: [8:7439873133384931009:2310], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:07:34.312212Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTQyNzZkNTktZDViNzkxNmYtNmYyNzI3ZDAtN2M0MjE1Njc=, ActorId: [8:7439873133384931009:2310], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:07:34.312247Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTQyNzZkNTktZDViNzkxNmYtNmYyNzI3ZDAtN2M0MjE1Njc=, ActorId: [8:7439873133384931009:2310], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:07:34.312360Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTQyNzZkNTktZDViNzkxNmYtNmYyNzI3ZDAtN2M0MjE1Njc=, ActorId: [8:7439873133384931009:2310], ActorState: unknown state, Session actor destroyed >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId >> TGRpcNewClient::YqlExplainDataQuery [GOOD] >> TGRpcNewCoordinationClient::BasicMethods >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize >> TFlatTableExecutorIndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_History_FlatIndex >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::UsesOnRetryStateDuringRetries ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2024-11-21T23:07:16.037789Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873073341629737:2222];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:16.045256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dea/r3tmp/tmpmuk20R/pdisk_1.dat 2024-11-21T23:07:16.439008Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:16.446104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:16.446202Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:16.454634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14615, node 1 2024-11-21T23:07:16.522947Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:16.522969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:16.522978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:16.523056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:16.850764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:16.855179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:16.855219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:16.857554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2024-11-21T23:07:16.857778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:16.857810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:16.858805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:16.859619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:16.859646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:16.860767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:16.863991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230436909, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:16.864037Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:16.864275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:16.865608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:16.865795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:16.865842Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:16.865961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:16.866003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:16.866041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:16.867821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:16.867871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:16.867911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:16.867992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:16.931589Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jd8fnp216mmjf4bprnhcs47p, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43054, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 9.975839s 2024-11-21T23:07:16.965174Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jd8fnp2ma6xk6fvjx80e6hxr, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:43070, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 
2024-11-21T23:07:19.613301Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jd8fnrnx7npq2zzhstdsk0mc, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:57040, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T23:07:19.613819Z node 1 :TX_PROXY DEBUG: actor# [1:7439873073341629787:2138] Handle TEvProposeTransaction 2024-11-21T23:07:19.613864Z node 1 :TX_PROXY DEBUG: actor# [1:7439873073341629787:2138] TxId# 281474976710658 ProcessProposeTransaction 2024-11-21T23:07:19.613923Z node 1 :TX_PROXY DEBUG: actor# [1:7439873073341629787:2138] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7439873086226532446:2611] 2024-11-21T23:07:19.698801Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873086226532446:2611] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:57040" 2024-11-21T23:07:19.706842Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873086226532446:2611] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:07:19.706945Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873086226532446:2611] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:07:19.707111Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873086226532446:2611] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:07:19.707269Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873086226532446:2611] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:07:19.707330Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873086226532446:2611] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2024-11-21T23:07:19.707458Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873086226532446:2611] txid# 281474976710658 HANDLE EvClientConnected 2024-11-21T23:07:19.707889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:19.708576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:19.709617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:19.709652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:19.714325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:07:19.714551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:19.714754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:19.714827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:19.716881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:19.716922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:19.716939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:19.717161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:19.717183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:19.717195Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], vers ... HARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2024-11-21T23:07:40.480584Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2024-11-21T23:07:40.480596Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2024-11-21T23:07:40.480819Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2024-11-21T23:07:40.480834Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:07:40.480846Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2024-11-21T23:07:40.480859Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2024-11-21T23:07:40.480872Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2024-11-21T23:07:40.480903Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2024-11-21T23:07:40.482586Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7439873178572921820:2158], Recipient [10:7439873174277953469:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T23:07:40.482615Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T23:07:40.482672Z node 10 :READ_TABLE_API DEBUG: [10:7439873178572921799:2366] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2024-11-21T23:07:40.482703Z node 10 :READ_TABLE_API DEBUG: [10:7439873178572921799:2366] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2024-11-21T23:07:40.483052Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2024-11-21T23:07:40.483390Z node 10 :TX_DATASHARD DEBUG: Send 
response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2024-11-21T23:07:40.483738Z node 10 :READ_TABLE_API DEBUG: [10:7439873178572921799:2366] got stream part, size: 75, RU required: 128 rate limiter absent 2024-11-21T23:07:40.484693Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2024-11-21T23:07:40.484775Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 4 2024-11-21T23:07:40.488389Z node 10 :READ_TABLE_API DEBUG: [10:7439873178572921799:2366] Starting inactivity timer for 600.000000s with tag 3 2024-11-21T23:07:40.489474Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7439873178572921800:2366], Recipient [10:7439873174277953469:2307]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2024-11-21T23:07:40.489513Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2024-11-21T23:07:40.489527Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2024-11-21T23:07:40.489573Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2024-11-21T23:07:40.489701Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7439873178572921800:2366], Recipient [10:7439873174277953469:2307]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2024-11-21T23:07:40.489721Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2024-11-21T23:07:40.489821Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7439873178572921800:2366], Recipient [10:7439873174277953469:2307]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1732230460506 TxId: 281474976715680 2024-11-21T23:07:40.490069Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2024-11-21T23:07:40.490085Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2024-11-21T23:07:40.490157Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7439873174277953469:2307], Recipient [10:7439873174277953469:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:07:40.490177Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:07:40.490203Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2024-11-21T23:07:40.490224Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:07:40.491488Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2024-11-21T23:07:40.491521Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2024-11-21T23:07:40.491551Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2024-11-21T23:07:40.491591Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2024-11-21T23:07:40.492385Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit 
ReadTableScan 2024-11-21T23:07:40.492415Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2024-11-21T23:07:40.492432Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2024-11-21T23:07:40.492514Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is DelayCompleteNoMoreRestarts 2024-11-21T23:07:40.492532Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2024-11-21T23:07:40.492548Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2024-11-21T23:07:40.492560Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2024-11-21T23:07:40.492604Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2024-11-21T23:07:40.493049Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2024-11-21T23:07:40.493075Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2024-11-21T23:07:40.493095Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:07:40.493110Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2024-11-21T23:07:40.493127Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2024-11-21T23:07:40.493141Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2024-11-21T23:07:40.503066Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2024-11-21T23:07:40.503108Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2024-11-21T23:07:40.503149Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 13 ms, propose latency: 24 ms, status: COMPLETE 2024-11-21T23:07:40.503229Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2024-11-21T23:07:40.506467Z node 10 :READ_TABLE_API NOTICE: [10:7439873178572921799:2366] Finish grpc stream, status: 400000 2024-11-21T23:07:40.510726Z node 10 :GRPC_SERVER DEBUG: [0x51a000042680] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.510980Z node 10 :GRPC_SERVER DEBUG: [0x51a000113480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.511162Z node 10 :GRPC_SERVER DEBUG: [0x51a000083a80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.511320Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b4080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.511482Z node 10 :GRPC_SERVER DEBUG: [0x51a000084080] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.511645Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ac880] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.511812Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c4e80] received request Name# SchemeInitRoot ok# false 
data# peer# current inflight# 0 2024-11-21T23:07:40.511973Z node 10 :GRPC_SERVER DEBUG: [0x51a000085e80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.512124Z node 10 :GRPC_SERVER DEBUG: [0x51a000087680] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.512266Z node 10 :GRPC_SERVER DEBUG: [0x51a000089a80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.512432Z node 10 :GRPC_SERVER DEBUG: [0x51a000057c80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.512580Z node 10 :GRPC_SERVER DEBUG: [0x51a000121880] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.512734Z node 10 :GRPC_SERVER DEBUG: [0x51a000108680] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.512889Z node 10 :GRPC_SERVER DEBUG: [0x51b0001aef80] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.513054Z node 10 :GRPC_SERVER DEBUG: [0x51a00008ac80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.513191Z node 10 :GRPC_SERVER DEBUG: [0x51a000085280] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.513344Z node 10 :GRPC_SERVER DEBUG: [0x51a000178e80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.513514Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c3c80] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.513690Z node 10 :GRPC_SERVER DEBUG: [0x51a000041a80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.513847Z node 10 :GRPC_SERVER DEBUG: [0x51a000083480] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.514001Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d5080] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.514148Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d8680] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.514294Z node 10 :GRPC_SERVER DEBUG: [0x51a000088880] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T23:07:40.514556Z node 10 :GRPC_SERVER DEBUG: [0x51a000025e80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> TYqlDecimalTests::DecimalKey [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::Uint8 >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::UseTableProfilePreset >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd >> YdbYqlClient::RetryOperationTemplate [GOOD] >> YdbYqlClient::RetryOperationAsync >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> 
ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2024-11-21T23:07:07.928755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873036089679181:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:07.928819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dfc/r3tmp/tmpOXrUx4/pdisk_1.dat 2024-11-21T23:07:08.572843Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16355, node 1 2024-11-21T23:07:08.709676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:08.709770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:08.747042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:09.055049Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:09.055068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:09.055075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:09.055154Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:07:09.470482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.480371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:09.480427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.490416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:09.490614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:09.490625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:09.494042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:09.494070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:09.495759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:09.499191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:09.499939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230429545, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:09.500002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:09.500294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:09.501998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:09.502149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:09.502190Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:09.502281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:09.502315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:09.502349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:09.505491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:09.505534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:09.505549Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:09.505633Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:11.891431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.891920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:11.892561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:11.892586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.895062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table 2024-11-21T23:07:11.895276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:11.895460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:11.895521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:11.897075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:11.897112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:11.897130Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:11.897304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:11.897320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:11.897330Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:11.897595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:11.908427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:11.908521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:11.918981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:11.995377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:11.995402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:11.995470Z node 
1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:11.997097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:12.001649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230432044, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:12.001700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230432044 2024-11-21T23:07:12.001817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:12.007602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:12.007896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:12.007957Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:12.014104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.014176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.014210Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:12.014487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:12.014509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:12.014519Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 2814749 ... 
6644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T23:07:39.665356Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:07:39.670217Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230459715, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:39.670262Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230459715, at schemeshard: 72057594046644480 2024-11-21T23:07:39.670371Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2024-11-21T23:07:39.670534Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230459715, at schemeshard: 72057594046644480 2024-11-21T23:07:39.670606Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:1 128 -> 240 2024-11-21T23:07:39.670672Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230459715, at schemeshard: 72057594046644480 2024-11-21T23:07:39.670710Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:2 128 -> 240 2024-11-21T23:07:39.670779Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, HandleReply TEvOperationPlan: step# 1732230459715 2024-11-21T23:07:39.670827Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:3 128 -> 240 2024-11-21T23:07:39.672759Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:39.673327Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:39.673398Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:2 ProgressState 2024-11-21T23:07:39.673520Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:2 progress is 1/4 2024-11-21T23:07:39.673681Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2024-11-21T23:07:39.673728Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 2/4 2024-11-21T23:07:39.673798Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:3 ProgressState 2024-11-21T23:07:39.673844Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:3 progress is 3/4 2024-11-21T23:07:39.673919Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:1 ProgressState 2024-11-21T23:07:39.673958Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:1 progress is 4/4 2024-11-21T23:07:39.673982Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:07:39.674015Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:1 2024-11-21T23:07:39.674030Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:2 2024-11-21T23:07:39.674041Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:3 2024-11-21T23:07:39.674061Z node 13 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 5, subscribers: 1 2024-11-21T23:07:39.679240Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:07:39.679288Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:07:39.679311Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:07:39.679525Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:07:39.679550Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:07:39.679562Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:07:39.679683Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:07:39.679702Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:07:39.679712Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:07:39.679819Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:07:39.679834Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:07:39.679843Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T23:07:39.679930Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:07:39.679946Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:07:39.679955Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T23:07:39.679990Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T23:07:39.684398Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439873171747365586:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:07:39.768652Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:39.768836Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:07:39.770491Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7439873150272528069:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:39.770547Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:39.771443Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:39.933032Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jd8fpc812f1kzmfskrc2nx8g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:40.091540Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jd8fpch55cmdaf15vmcm9wjz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:40.263550Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jd8fpcp8b7vw0q022509y9na, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:40.452697Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8fpcve53rnrrjmbfdkgmn3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:40.664712Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8fpd1a6jv829smsvyw81yy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:40.910779Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710666. 
Ctx: { TraceId: 01jd8fpd8pcxxk6npe2f50mrmr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:41.089118Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd8fpdfv60dkey3ztg4txk6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:41.403185Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jd8fpdn55236kndfd33g66g2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:41.758730Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jd8fpdzbfxsee4jhhfh1f2ze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:42.220605Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jd8fpea622d3njcdah15kyjt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Nzc1N2U3ZWUtZWVjMmI2ZmItMjdmYzUxNWItM2I2MWJjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts >> YdbMonitoring::SelfCheck [GOOD] >> YdbMonitoring::SelfCheckWithNodesDying >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteDmlQuery >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD] >> TPersQueueTest::Delete ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] Test command err: 2024-11-21T23:07:15.833195Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873069000343923:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:15.833256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dec/r3tmp/tmpAdQ7bx/pdisk_1.dat 2024-11-21T23:07:16.403299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:16.403447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:16.419213Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:16.421702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10979, node 1 2024-11-21T23:07:16.520019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:16.520051Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:16.520058Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:16.520152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:16.890037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:16.895052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:16.895138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:16.899702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2024-11-21T23:07:16.899919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:16.899940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:16.902709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:16.902983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:16.903010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:16.904541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:16.908966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230436951, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:16.909012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:16.909301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:16.910944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:16.911107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:16.911157Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:16.911255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:16.911297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:16.911401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:16.915431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:16.915478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:16.915494Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:16.915581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:21.112055Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873097122440112:2170];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dec/r3tmp/tmpZncrUv/pdisk_1.dat 2024-11-21T23:07:21.174612Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:21.254643Z node 4 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T23:07:21.304293Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:21.304411Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:21.309400Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3165, node 4 2024-11-21T23:07:21.427051Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:21.427074Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:21.427082Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:21.427197Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:21.692646Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:21.693102Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:21.693122Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:21.695224Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:21.695383Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:21.695396Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T23:07:21.698381Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:21.698428Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:21.701372Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:21.702104Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:21.706040Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230441753, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:21.706081Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:21.706343Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:21.708459Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:21.708639Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:21.708687Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:21.708766Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:07:21.708827Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:07:21.708881Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:07:21.710591Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940 ... 
known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:42.719856Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:42.720417Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:42.720520Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2024-11-21T23:07:42.720778Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:42.720883Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:42.720979Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:42.728728Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.728788Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.728810Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:42.729110Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.729130Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.729141Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:42.729264Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.729280Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.729289Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T23:07:42.729400Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.729415Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.729425Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T23:07:42.729542Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.729559Z node 
13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.729569Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T23:07:42.735841Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:42.747923Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230462788, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:42.747984Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230462788, at schemeshard: 72057594046644480 2024-11-21T23:07:42.748123Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:07:42.748271Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230462788, at schemeshard: 72057594046644480 2024-11-21T23:07:42.748344Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2024-11-21T23:07:42.748423Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230462788, at schemeshard: 72057594046644480 2024-11-21T23:07:42.748469Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 128 -> 240 2024-11-21T23:07:42.748548Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, HandleReply TEvOperationPlan: step# 1732230462788 2024-11-21T23:07:42.748602Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 128 -> 240 2024-11-21T23:07:42.751925Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:42.752552Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:42.752635Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:3 ProgressState 2024-11-21T23:07:42.752741Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:3 progress is 1/4 2024-11-21T23:07:42.752970Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2024-11-21T23:07:42.753023Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 2/4 2024-11-21T23:07:42.753105Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:2 ProgressState 2024-11-21T23:07:42.753149Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:2 progress is 3/4 2024-11-21T23:07:42.753232Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:42.753276Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 4/4 2024-11-21T23:07:42.753307Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:42.753346Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2024-11-21T23:07:42.753364Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 281474976710658:2 2024-11-21T23:07:42.753376Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:3 2024-11-21T23:07:42.753396Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 5, subscribers: 1 2024-11-21T23:07:42.759093Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.759169Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.759190Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:42.759486Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.759518Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.759531Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T23:07:42.759654Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.759672Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.759683Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:07:42.760268Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.760296Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.760309Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:07:42.760482Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.760505Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.760517Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T23:07:42.760556Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:07:42.772931Z node 13 :KQP_WORKLOAD_SERVICE WARN: 
[WorkloadService] [TPoolCreatorActor] ActorId: [13:7439873186469862716:2302], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:07:42.836673Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:42.836866Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:07:42.843558Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin >> TFlatTableExecutorIndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_History_BTreeIndex >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T23:07:46.382748Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.382783Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.382811Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:46.383538Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:46.384274Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:46.394360Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.395241Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T23:07:46.396586Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.396604Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.396620Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:46.414660Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:46.415117Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:46.415469Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.415740Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:46.416035Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:07:46.417898Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.417921Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.417946Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:46.418212Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:46.434365Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:46.434599Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.438431Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:46.439205Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.439476Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:46.439558Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:46.439605Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:07:46.440833Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.440862Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.445066Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:46.445492Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:46.445956Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:46.446124Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.450616Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T23:07:46.451613Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:46.451772Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:07:46.452034Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:07:46.452204Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:07:46.452299Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:46.452323Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:46.452373Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:46.452514Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T23:07:46.452558Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:46.452579Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:07:46.452601Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:46.452720Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T23:07:46.452789Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:07:46.452807Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T23:07:46.452823Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:46.452888Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T23:07:46.452908Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:07:46.452933Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T23:07:46.452950Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:46.453029Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T23:07:46.459271Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.459298Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.459325Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:46.463221Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:46.463937Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:46.464089Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.465564Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T23:07:46.466547Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:46.466760Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:07:46.467187Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:07:46.467387Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:07:46.467497Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:46.467536Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2024-11-21T23:07:46.467559Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:46.467572Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:07:46.467602Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:46.467816Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2024-11-21T23:07:46.467907Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:07:46.467924Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T23:07:46.467938Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:07:46.467952Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T23:07:46.467972Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:46.468120Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2024-11-21T23:07:46.479378Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.479410Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.479428Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:46.490697Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:46.491282Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:46.491490Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:46.491781Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:46.492802Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:46.493505Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:46.494620Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T23:07:46.494720Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:46.498504Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:46.498549Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:46.498571Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T23:07:46.498589Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T23:07:46.498634Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T23:07:46.498662Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T23:07:46.498836Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:07:46.498991Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> TGRpcNewCoordinationClient::BasicMethods [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> TYqlDateTimeTests::SimpleOperations [GOOD] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestBusySession >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings >> Compression::WriteRAW ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T23:07:47.499260Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.499289Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.499358Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:47.499813Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:47.505645Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:47.505709Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.506594Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.506610Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.506626Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:47.506893Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:07:47.507151Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:47.507186Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.507869Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.507886Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.507902Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:47.508238Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:07:47.508283Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.508313Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.508746Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T23:07:47.509489Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.509505Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.509519Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:47.509920Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:07:47.509941Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.509967Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.510004Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T23:07:47.510866Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:07:47.510888Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:07:47.510907Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:47.511401Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:47.511807Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:47.520644Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:07:47.521039Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:47.521352Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T23:07:47.524766Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T23:07:47.525006Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:47.525044Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:47.525059Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:47.525073Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T23:07:47.525091Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T23:07:47.525103Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T23:07:47.525116Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T23:07:47.525129Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T23:07:47.525220Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T23:07:47.525232Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T23:07:47.525243Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T23:07:47.525254Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T23:07:47.525266Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T23:07:47.525285Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T23:07:47.525300Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T23:07:47.525311Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T23:07:47.525388Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T23:07:47.525402Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T23:07:47.525415Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T23:07:47.525428Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T23:07:47.525447Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T23:07:47.525460Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T23:07:47.525496Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T23:07:47.525513Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T23:07:47.525527Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T23:07:47.525547Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T23:07:47.525564Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T23:07:47.525587Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T23:07:47.525618Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T23:07:47.525635Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T23:07:47.525646Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T23:07:47.525658Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T23:07:47.525689Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T23:07:47.525705Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T23:07:47.525718Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T23:07:47.525739Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T23:07:47.525761Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T23:07:47.525773Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T23:07:47.525792Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T23:07:47.525807Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T23:07:47.525820Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T23:07:47.525832Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T23:07:47.525844Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T23:07:47.525861Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T23:07:47.525874Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T23:07:47.525886Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T23:07:47.525898Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T23:07:47.525914Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T23:07:47.525926Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T23:07:47.525938Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T23:07:47.525998Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T23:07:47.528118Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T23:07:47.528311Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T23:07:47.528352Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T23:07:47.528378Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T23:07:47.528404Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T23:07:47.528426Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T23:07:47.528440Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T23:07:47.528455Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T23:07:47.528471Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T23:07:47.528489Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T23:07:47.528503Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T23:07:47.528517Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T23:07:47.528529Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T23:07:47.528553Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T23:07:47.528566Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T23:07:47.528581Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T23:07:47.528602Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T23:07:47.528645Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T23:07:47.528661Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T23:07:47.528674Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T23:07:47.528696Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T23:07:47.528721Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T23:07:47.528736Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T23:07:47.528748Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T23:07:47.528761Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T23:07:47.528774Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T23:07:47.528786Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T23:07:47.528798Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T23:07:47.528811Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T23:07:47.528830Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T23:07:47.528841Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T23:07:47.528853Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T23:07:47.528865Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T23:07:47.528895Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T23:07:47.528910Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T23:07:47.528921Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T23:07:47.528941Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T23:07:47.528957Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T23:07:47.528970Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T23:07:47.528982Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T23:07:47.528994Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T23:07:47.529006Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T23:07:47.529017Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T23:07:47.529029Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T23:07:47.529041Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T23:07:47.529056Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T23:07:47.529068Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T23:07:47.529128Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T23:07:47.529150Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T23:07:47.529167Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T23:07:47.529180Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T23:07:47.529227Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T23:07:47.529371Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:07:47.530681Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.530705Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.530721Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:47.531008Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:07:47.531361Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:47.531506Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.533428Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:47.635309Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.635531Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:47.635582Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:47.635619Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:07:47.635671Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:07:47.836924Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T23:07:47.938588Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:07:47.938704Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:07:47.938825Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:07:47.939919Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.939937Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.946136Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:47.946593Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:47.947360Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:47.947506Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:47.950856Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:48.057692Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:48.058105Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:48.058187Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:48.058223Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:07:48.058294Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T23:07:48.058413Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:07:48.058603Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:07:48.058670Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T23:07:48.058761Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |79.9%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] Test command err: 2024-11-21T23:07:16.109910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873074888695198:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:16.110380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001de8/r3tmp/tmp4f5gJ7/pdisk_1.dat 2024-11-21T23:07:16.675323Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:16.682715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:16.682841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:16.690621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15725, node 1 2024-11-21T23:07:16.937575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:16.937599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:16.937605Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:16.937696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
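Stepping back to the ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit trace earlier in this output: the session commits consumed messages as half-open offset ranges ("Commit offsets [1, 3)" covers offsets 1 and 2) and matches the server's acknowledgement by cookie ("Committed response: { cookies { assign_id: 1 partition_cookie: 1 } }"). The following is a minimal, self-contained sketch of that bookkeeping, inferred from the log lines only; it is not the SDK's actual implementation, and every name in it is hypothetical.

#include <cstdint>
#include <iostream>
#include <map>
#include <optional>

// Hypothetical per-partition-stream commit bookkeeping, modelled on the
// "Commit offsets [begin, end)" / "Committed response: { cookies ... }" lines
// in the trace above. Illustration only, not YDB SDK code.
struct TCommitTracker {
    uint64_t NextOffsetToCommit = 0;        // first offset not yet sent for commit
    std::map<uint64_t, uint64_t> InFlight;  // cookie -> exclusive end offset of the range

    // Called after the application has processed messages [NextOffsetToCommit, end).
    void Commit(uint64_t end, uint64_t cookie) {
        std::cout << "Commit offsets [" << NextOffsetToCommit << ", " << end << ")\n";
        InFlight[cookie] = end;
        NextOffsetToCommit = end;
    }

    // Called when the server acknowledges a cookie; returns the committed end offset.
    std::optional<uint64_t> OnCommittedResponse(uint64_t cookie) {
        auto it = InFlight.find(cookie);
        if (it == InFlight.end()) {
            return std::nullopt;            // unknown or already acknowledged cookie
        }
        uint64_t end = it->second;
        InFlight.erase(it);
        return end;
    }
};

int main() {
    TCommitTracker tracker;
    tracker.NextOffsetToCommit = 1;         // offsets in the trace start at 1
    tracker.Commit(3, /*cookie=*/1);        // mirrors "Commit offsets [1, 3)"
    if (auto end = tracker.OnCommittedResponse(1)) {
        std::cout << "Committed up to offset " << *end << " (exclusive)\n";
    }
    return 0;
}

The half-open convention is what lets the session destroy the partition stream immediately after the acknowledgement for cookie 1, as the trace shows: everything below offset 3 is known to be durable and nothing is double-counted.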
2024-11-21T23:07:17.220466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:17.226546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:17.226597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:17.229966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:17.230181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:17.230205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:17.232361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:17.232397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:17.233724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:17.242682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230437287, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:17.242733Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:17.243042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:17.245211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:17.245389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:17.245435Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:17.245515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:17.245567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:17.245626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:07:17.249546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:17.249604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:17.249620Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:17.249702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T23:07:17.249957Z node 
1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:19.916275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/FooTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:19.916770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:19.917323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:19.917350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:19.923382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/FooTable 2024-11-21T23:07:19.923603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:19.923834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:19.923900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:19.925473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:19.925509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:19.925527Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:19.925734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:19.925761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:19.925774Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:19.927596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:19.940219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.940347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:19.943178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:20.030112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:20.030142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:20.030213Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:20.035006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:20.043949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230440087, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:20.044010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230440087 2024-11-21T23:07:20.044138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:20.046442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:20.046773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:20.046832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:20.048659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:20.048689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:20.048706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:20.048943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:20.048974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:20.048987Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 2814749 ... 
st_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001de8/r3tmp/tmpv5N6Dr/pdisk_1.dat 2024-11-21T23:07:42.532709Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:42.645521Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28977, node 13 2024-11-21T23:07:42.744880Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:42.744978Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:42.768227Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:42.796413Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:42.796438Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:42.796454Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:42.796595Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:43.302841Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:43.303236Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:43.303264Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:43.315314Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:43.315558Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:43.315581Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T23:07:43.323538Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:43.323582Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:43.331499Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:43.332965Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:43.349412Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230463390, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:43.349460Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:43.349765Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:43.355391Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:43.355611Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:43.355680Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:43.355796Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:07:43.355852Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:07:43.355915Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:07:43.358328Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:07:43.358381Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:07:43.358429Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:43.358523Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T23:07:43.432616Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:43.432796Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 1 -> 2 2024-11-21T23:07:43.433184Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:43.433208Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:43.435500Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 
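The FLAT_TX_SCHEMESHARD "Change state for txid" lines in the traces above repeatedly walk the same numeric chain (2 -> 3 -> 128 -> 240 for ALTER DATABASE and CREATE KESUS, with 1 -> 2 and an extra 129 step for CREATE TABLE). The snippet below only encodes those transitions so the trace is easier to follow; the numeric codes are copied verbatim from the log, while the symbolic names are reading aids guessed from nearby text (TCreateParts, TConfigureParts, TPropose, TProposedWaitParts, TDone) and are not YDB's internal identifiers.

#include <cstdint>
#include <iostream>
#include <vector>

// State codes taken from the "Change state for txid" lines above.
// Names are inferred labels, NOT the real YDB enum.
enum class ESubOpState : uint32_t {
    Waiting        = 1,
    CreateParts    = 2,
    ConfigureParts = 3,
    Propose        = 128,
    ProposedWait   = 129,   // seen only on the CREATE TABLE path in this trace
    Done           = 240,
};

int main() {
    // The two transition chains that appear in this log fragment.
    std::vector<std::vector<ESubOpState>> chains = {
        // ALTER DATABASE / CREATE KESUS: 2 -> 3 -> 128 -> 240
        {ESubOpState::CreateParts, ESubOpState::ConfigureParts,
         ESubOpState::Propose, ESubOpState::Done},
        // CREATE TABLE: 1 -> 2 -> 3 -> 128 -> 129 (240 follows later in the log)
        {ESubOpState::Waiting, ESubOpState::CreateParts, ESubOpState::ConfigureParts,
         ESubOpState::Propose, ESubOpState::ProposedWait},
    };
    for (const auto& chain : chains) {
        for (size_t i = 0; i + 1 < chain.size(); ++i) {
            std::cout << "Change state " << static_cast<uint32_t>(chain[i])
                      << " -> " << static_cast<uint32_t>(chain[i + 1]) << "\n";
        }
        std::cout << "---\n";
    }
    return 0;
}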
2024-11-21T23:07:43.435710Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:43.435965Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:43.436041Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T23:07:43.437764Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:43.437799Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:43.437823Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:43.438706Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:43.438741Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:43.438762Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:43.443261Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T23:07:43.443650Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:43.443729Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2024-11-21T23:07:43.477740Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 128 2024-11-21T23:07:43.482238Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:43.486248Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230463530, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:43.486307Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976715658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T23:07:43.486478Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2024-11-21T23:07:43.488443Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:43.488723Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:43.488785Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T23:07:43.488901Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T23:07:43.488960Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T23:07:43.489111Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 
281474976715658, publications: 2, subscribers: 1 2024-11-21T23:07:43.489958Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:43.489995Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:43.490015Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:43.490229Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715658 2024-11-21T23:07:43.490245Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715658 2024-11-21T23:07:43.490255Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:43.490291Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::SimpleOperations [GOOD] Test command err: 2024-11-21T23:07:07.682537Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873035822069638:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:07.682583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e0a/r3tmp/tmpZA8Vdi/pdisk_1.dat 2024-11-21T23:07:08.313767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:08.313886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:08.333233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:08.371349Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2796, node 1 2024-11-21T23:07:08.484671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:08.507904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:08.526502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:08.542092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:07:08.558543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:07:08.763439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:08.763467Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2024-11-21T23:07:08.763476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:08.763586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:09.149027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.167546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:09.167642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.175775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:09.176128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:09.176144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
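The bare "waiting..." markers in these setup traces are the test client blocking until the schemeshard reports the transaction as registered and completed (the NotifyTxCompletion lines that surround them). A generic poll-with-timeout helper of the kind such a wait conceptually boils down to is sketched below as an assumption-laden illustration; the real test client subscribes to completion notifications rather than polling, and none of these names come from the YDB sources.

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Generic "wait until the server-side operation is visible" helper, standing in
// for the "waiting..." lines above. Purely illustrative.
bool WaitFor(const std::function<bool()>& done,
             std::chrono::milliseconds timeout = std::chrono::seconds(10),
             std::chrono::milliseconds step = std::chrono::milliseconds(50)) {
    auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (done()) {
            return true;
        }
        std::this_thread::sleep_for(step);
    }
    return done();   // one final check at the deadline
}

int main() {
    int fakeProgress = 0;
    bool ok = WaitFor([&] {
        // Stand-in for "has txId 281474976710657 completed?"; here we just count.
        return ++fakeProgress >= 3;
    });
    std::cout << (ok ? "transaction completed" : "timed out waiting") << "\n";
    return 0;
}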
2024-11-21T23:07:09.183214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:09.186673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:09.186709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:09.195408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.215537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230429258, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:09.215645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:09.215935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:09.227195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:09.227399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:09.227461Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:09.227569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:09.227622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:09.227682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:09.231042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:09.231105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:09.231127Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:09.231236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:11.700849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.701352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:11.701966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:11.701985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:11.715639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2024-11-21T23:07:11.715925Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:11.716098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:11.716178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:11.717879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:11.717955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:11.717991Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:11.718288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:11.718306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:11.718334Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:11.723742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:11.730304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:11.730426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:11.735625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:11.795197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:11.795225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:11.795301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:11.803274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:11.814732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230431855, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:11.814803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230431855 2024-11-21T23:07:11.814905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:11.829656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:11.829977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:11.830062Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:11.831796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:11.831840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at ... 2230463187, at schemeshard: 72057594046644480 2024-11-21T23:07:43.146554Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 128 -> 240 2024-11-21T23:07:43.146687Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710660:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230463187, at schemeshard: 72057594046644480 2024-11-21T23:07:43.146733Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:1 128 -> 240 2024-11-21T23:07:43.146797Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710660:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230463187, at schemeshard: 72057594046644480 2024-11-21T23:07:43.146838Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:2 128 -> 240 2024-11-21T23:07:43.146910Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710660:3, HandleReply TEvOperationPlan: step# 1732230463187 2024-11-21T23:07:43.146963Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:3 128 -> 240 2024-11-21T23:07:43.152118Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:43.154217Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:43.154309Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:2 ProgressState 2024-11-21T23:07:43.154473Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:2 progress is 1/4 2024-11-21T23:07:43.154708Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:0 ProgressState 2024-11-21T23:07:43.154767Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:0 progress is 2/4 2024-11-21T23:07:43.154853Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:3 ProgressState 2024-11-21T23:07:43.154898Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:3 progress is 3/4 2024-11-21T23:07:43.154972Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:1 ProgressState 2024-11-21T23:07:43.155015Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:1 progress is 4/4 2024-11-21T23:07:43.155041Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2024-11-21T23:07:43.155077Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 2024-11-21T23:07:43.155091Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:2 2024-11-21T23:07:43.155103Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:3 2024-11-21T23:07:43.155123Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710660, publications: 5, subscribers: 1 2024-11-21T23:07:43.162223Z 
node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:07:43.162285Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:07:43.162305Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2024-11-21T23:07:43.162613Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:07:43.162637Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:07:43.162649Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:07:43.162785Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:07:43.162802Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:07:43.162810Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T23:07:43.162905Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:07:43.162942Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:07:43.162955Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T23:07:43.163063Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:07:43.163078Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:07:43.163088Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2024-11-21T23:07:43.163122Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710660, subscribers: 1 2024-11-21T23:07:43.173195Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439873191170514365:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T23:07:43.276757Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:43.277040Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:07:43.283777Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:43.469875Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jd8fpfma88cf5h53bp9ftmv3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:43.632822Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jd8fpg02bk1s15wm1cj3h3hc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:44.230315Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8fpg507s42yw939xp6qnt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:44.245586Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8fpg507s42yw939xp6qnt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:44.752865Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8fpgr7dyzy77v3kjsq23ez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:44.765524Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd8fpgr7dyzy77v3kjsq23ez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:44.908644Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jd8fph8meafg4tczqhv84t10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:07:45.084647Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jd8fphcq9pty99b84qty3b0v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:45.408063Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jd8fphj6ay8rmpnvfsxrcdkd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:45.543319Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jd8fphwjabm5jy1r69t2xbaw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:45.659017Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd8fpj0j5b7m5z8dw5vb2csy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:46.180105Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jd8fpj43985wqs82hk2da6vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:46.232505Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jd8fpj43985wqs82hk2da6vd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OWNiNmZkMjQtYWQxYTYzMmItOTgyZGRjYzUtZjM1NTBiYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] >> YdbYqlClient::TestDecimalFullStack [GOOD] >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> TFlatTableExecutorIndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_Groups_FlatIndex >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDecimalFullStack [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001ddc/r3tmp/tmpquQfIm/pdisk_1.dat 2024-11-21T23:07:22.484884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:22.852289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:22.852384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:22.856589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:22.866204Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13259, node 1 2024-11-21T23:07:22.918154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:22.918652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:22.918677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:22.918729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:07:22.918775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:07:23.125598Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:23.125619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:23.125639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:23.125724Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:23.598728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:23.606726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:23.606789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:23.608989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:23.609196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:23.609211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:23.612508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:23.612535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:23.614721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:07:23.628117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:23.637679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230443671, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:23.637728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:23.638015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:23.647460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:23.647626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:23.647698Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:23.647779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:23.647817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:23.647868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:23.651282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:23.651337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:23.651358Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:23.651458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:26.231377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873116739128657:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:26.231511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:26.234804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873116739128669:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:26.239382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:26.239611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:26.239643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T23:07:26.239713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:26.239733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T23:07:26.239794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:26.239847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710658:3, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:26.240147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 1 -> 128 2024-11-21T23:07:26.240442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:26.240458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:07:26.245461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE RESOURCE POOL, path: .metadata/workload_manager/pools/default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:26.245751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:26.246286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:26.246360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2024-11-21T23:07:26.246651Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:26.246763Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:26.246855Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:26.249591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:26.249639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:26.249663Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:26.249917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:26.249935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:26.249945Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:26.250058Z node 1 :FL ... Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:46.351910Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:46.351920Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T23:07:46.352004Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:46.352019Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:46.352030Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T23:07:46.362766Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T23:07:46.372572Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230466414, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:46.372619Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230466414, at schemeshard: 72057594046644480 2024-11-21T23:07:46.372744Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T23:07:46.372863Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230466414, at schemeshard: 72057594046644480 2024-11-21T23:07:46.372916Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T23:07:46.372962Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230466414, at schemeshard: 72057594046644480 2024-11-21T23:07:46.372999Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T23:07:46.373066Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732230466414 2024-11-21T23:07:46.373110Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T23:07:46.375375Z node 10 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:46.375912Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:46.375985Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T23:07:46.376075Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T23:07:46.376241Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T23:07:46.376295Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T23:07:46.376401Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T23:07:46.376435Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T23:07:46.376513Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T23:07:46.376542Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:2 progress is 4/4 2024-11-21T23:07:46.376564Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T23:07:46.376593Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T23:07:46.376609Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T23:07:46.376619Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T23:07:46.376636Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T23:07:46.380165Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:46.380209Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:46.380226Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:07:46.380485Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:46.380527Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:46.380539Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:07:46.380673Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:46.380689Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:46.380697Z node 10 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:07:46.380815Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:46.380829Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:46.380837Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T23:07:46.380988Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:46.381009Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:46.381018Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T23:07:46.381062Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T23:07:46.400897Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439873203205575864:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:07:46.495401Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:46.495590Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:07:46.499604Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:46.633351Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439873181730738317:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:46.633513Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:46.779576Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fpjrt97awsbv7jykxnc59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM0ZDg5ZjUtMzc1Y2ZjMDQtYmM2YWQ4MWYtN2VhN2ViNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:46.978027Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fpk834k7474xz2hbnm1e3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM0ZDg5ZjUtMzc1Y2ZjMDQtYmM2YWQ4MWYtN2VhN2ViNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:47.203592Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fpke8br4qcb2c3bbt2mgk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM0ZDg5ZjUtMzc1Y2ZjMDQtYmM2YWQ4MWYtN2VhN2ViNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:47.453201Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fpkp2asxgahnb419dn57a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM0ZDg5ZjUtMzc1Y2ZjMDQtYmM2YWQ4MWYtN2VhN2ViNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:47.566852Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8fpkwn40kdb4xcx8ps7ftq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM0ZDg5ZjUtMzc1Y2ZjMDQtYmM2YWQ4MWYtN2VhN2ViNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:48.887767Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. 
Ctx: { TraceId: 01jd8fpm0kcgjsbv62s349ds2v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM0ZDg5ZjUtMzc1Y2ZjMDQtYmM2YWQ4MWYtN2VhN2ViNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:48.901955Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd8fpm0kcgjsbv62s349ds2v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZDM0ZDg5ZjUtMzc1Y2ZjMDQtYmM2YWQ4MWYtN2VhN2ViNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |79.9%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] Test command err: 2024-11-21T23:07:06.221376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873031395554789:2248];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:06.221431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e25/r3tmp/tmpWtrTq8/pdisk_1.dat 2024-11-21T23:07:06.812202Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:06.824946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:06.825058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:06.836896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2208, node 1 2024-11-21T23:07:06.927084Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:06.927105Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:06.927112Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:06.927207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1590 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:07.349450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.355132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:07.355203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.359235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:07.359441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:07.359455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:07.361775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:07.361806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T23:07:07.370792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.371789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:07.384782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230427424, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:07.384843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:07.385161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:07.387628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.387857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.387911Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:07.388051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:07.388095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:07.388149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:07.393842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:07.393914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:07.393939Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:07.394051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:1590 2024-11-21T23:07:07.783881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.784313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:07.784334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.793062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T23:07:07.793235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.793585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.793676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 
281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T23:07:07.793696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:07.795500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:07.795576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:07.795599Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:07.795832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:07.795859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:07.795868Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 waiting... 2024-11-21T23:07:07.803120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:07.803152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:07.806757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:07.807502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:07.819958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230427865, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:07.819998Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet 72057594046644480 2024-11-21T23:07:07.820197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:07:07.822004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.822233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.822291Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:07.822415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:07:07.822452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:07.822495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T23:07:07.823527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:07.823567Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:07.823580Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:07.823776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:07.823809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:07.823819Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:07.823864Z node 1 :FLAT_TX_SCHE ... Execute, stepId: 1732230461570, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:41.524819Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet 72057594046644480 2024-11-21T23:07:41.525026Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:07:41.528000Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:41.528400Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:41.528482Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:41.528650Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:07:41.528700Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:41.528757Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T23:07:41.535395Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:41.535458Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:41.535483Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:41.535765Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:41.535789Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:41.535802Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:41.535856Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:07:41.545016Z node 13 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.545812Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:41.545853Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.548402Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T23:07:41.548645Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 2024-11-21T23:07:41.554739Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:07:41.557379Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.557702Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.557746Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 2 -> 3 2024-11-21T23:07:41.560450Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:42.062723Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439873184540073019:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:42.062802Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:42.077404Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:42.077535Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:42.084862Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T23:07:42.086062Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:42.558261Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710659:0 HandleReply TEvConfigureStatus operationId:281474976710659:0 at schemeshard:72057594046644480 TClient is connected to server localhost:22890 2024-11-21T23:07:42.830597Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" 
ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: 
false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T23:07:42.989406Z node 13 :FLAT_TX_SCHEMESHARD INFO: Transaction 281474976710659 reset current state at schemeshard 72057594046644480 because pipe to tablet 72075186224037888 disconnected 2024-11-21T23:07:42.989821Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T23:07:42.990427Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:07:47.064527Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7439873184540073019:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:47.064638Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> YdbScripting::BasicV1 [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |79.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> YdbYqlClient::RenameTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> 
ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T23:07:49.822589Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.822616Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.822636Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:49.839954Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:49.851832Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:49.865054Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.870664Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:49.871587Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:49.872029Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:49.872197Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T23:07:49.872330Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:49.872418Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:49.872442Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T23:07:49.872479Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:07:49.872497Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:07:49.873903Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.873921Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.873938Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:49.885612Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:49.886120Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:49.886324Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.887150Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T23:07:49.888213Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:49.888488Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:07:49.896614Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:07:49.896865Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:07:49.897010Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T23:07:49.897060Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:49.897102Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:49.897298Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T23:07:49.897344Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:49.897363Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:07:49.897385Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:49.897505Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T23:07:49.897606Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:07:49.897624Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T23:07:49.897641Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:49.897710Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T23:07:49.897750Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:07:49.897822Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (8-8) 2024-11-21T23:07:49.897852Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:49.898034Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T23:07:49.903731Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.903761Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.903783Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:49.916046Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:49.921933Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:49.922132Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:49.922625Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T23:07:49.923602Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:49.923806Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:07:49.928940Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:07:49.929161Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:07:49.929276Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:49.929322Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:49.929442Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2024-11-21T23:07:49.929485Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:49.929507Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:49.929605Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2024-11-21T23:07:49.929644Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:49.929664Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:49.929728Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2024-11-21T23:07:49.929750Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:07:49.929765Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataRecei ... uster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:51.710793Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2024-11-21T23:07:51.851030Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:07:51.851070Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:07:51.851107Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:51.866696Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:51.878691Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:51.878965Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:07:51.879335Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T23:07:52.049363Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T23:07:52.050491Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:52.050534Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:52.050552Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:52.050567Z :DEBUG: Take Data. Partition 1. 
Read: {0, 3} (4-4) 2024-11-21T23:07:52.050587Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T23:07:52.050603Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T23:07:52.050618Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T23:07:52.050635Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T23:07:52.050658Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2024-11-21T23:07:52.050675Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T23:07:52.050728Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2024-11-21T23:07:52.050892Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:52.053179Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2024-11-21T23:07:52.075418Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.075447Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.075473Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.078316Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.086649Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:52.086826Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.089050Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:52.089491Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T23:07:52.095743Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.095773Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.095794Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.100641Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.101387Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:52.101567Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.103312Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.106564Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:52.106718Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:52.106769Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:07:52.106935Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> TGRpcYdbTest::ExecuteDmlQuery [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T23:07:52.528045Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.528081Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.528102Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.528564Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:07:52.528603Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.528632Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.529650Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008087s 2024-11-21T23:07:52.530290Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.530744Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:52.530892Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.532580Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.532598Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.532613Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.532929Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:07:52.532961Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.532989Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.533034Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009929s 2024-11-21T23:07:52.533418Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.534178Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:52.534276Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.535247Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.535265Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.535279Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.536033Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:07:52.536077Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.536101Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.536149Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.204283s 2024-11-21T23:07:52.536568Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.536850Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:52.536910Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.537503Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.537514Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.537524Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.537726Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:07:52.537751Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.537768Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.537809Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.177025s 2024-11-21T23:07:52.538133Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.538435Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:52.538482Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.539355Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.539374Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.539392Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.539675Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.542080Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:52.552333Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.552623Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T23:07:52.552656Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.552667Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.552713Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.216161s 2024-11-21T23:07:52.553014Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:07:52.554144Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.554165Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.554182Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.566679Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.567208Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:52.567396Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.569061Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:52.670521Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.670815Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:52.670878Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:52.670913Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:07:52.670976Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:07:52.773806Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:07:52.773970Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:07:52.775249Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.775270Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.775290Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.782496Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.786851Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:52.787256Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.792436Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:52.895706Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.898599Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:52.898683Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:52.898726Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2024-11-21T23:07:52.898818Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T23:07:52.898937Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:07:52.901618Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:07:52.901729Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:07:52.901887Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] >> TFlatTableExecutorIndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_Groups_BTreeIndex >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] Test command err: 2024-11-21T23:07:25.945431Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873114156077086:2161];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd8/r3tmp/tmpTtV3OR/pdisk_1.dat 2024-11-21T23:07:26.267968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:26.579101Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:26.581784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:26.581872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:26.595122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21309, node 1 2024-11-21T23:07:26.968771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:26.968791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:26.968800Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:26.968887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2005 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:27.449241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:27.454623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:27.454671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:27.459402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:27.459558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:27.459570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:27.465004Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:27.465041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:27.466673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:27.471364Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:27.486687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230447521, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:27.486744Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:27.487019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:27.490471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:27.490646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:27.490692Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:27.490762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2024-11-21T23:07:27.490795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:27.490851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:27.496599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:27.496659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:27.496676Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:27.496766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:27.625282Z node 1 :TX_PROXY ERROR: Access denied for bad@builtin with access CreateTable to path Root 2024-11-21T23:07:31.638557Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873137275099741:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:31.641412Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd8/r3tmp/tmpNTxty9/pdisk_1.dat 2024-11-21T23:07:31.840262Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10244, node 4 2024-11-21T23:07:31.972110Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:31.972210Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:32.006373Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:32.019980Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:32.019998Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:32.020004Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:32.020098Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:32.227755Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:32.228170Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:32.228204Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:32.230332Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:32.230522Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:32.230536Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:32.233444Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:32.233470Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting...2024-11-21T23:07:32.235308Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:32.238917Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230452281, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:32.238945Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:32.239175Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:32.241147Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:32.241288Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:32.241330Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:32.241391Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:32.241423Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:32.241459Z node 4 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:07:32.242796Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generati ... 024-11-21T23:07:47.059673Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd8/r3tmp/tmp6KoNqN/pdisk_1.dat 2024-11-21T23:07:47.351869Z node 13 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:47.385323Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:47.385413Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:47.389494Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27476, node 13 2024-11-21T23:07:47.586082Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:47.586106Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:47.586115Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:47.586241Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13771 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:07:47.881995Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:47.882451Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:47.882471Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:47.887591Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:47.887815Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:47.887835Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:07:47.895599Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:47.895632Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:47.896649Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:47.898115Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:47.907726Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230467947, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:47.907766Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:47.907981Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:47.911333Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:47.911532Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:47.911591Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:47.911684Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:47.911732Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:47.911783Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:47.913081Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:47.913139Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:47.913158Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2024-11-21T23:07:47.913247Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:48.023703Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateKesus Propose, path: /Root/node1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:48.023842Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:48.024126Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:48.024154Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:48.030604Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE KESUS, path: /Root/node1 2024-11-21T23:07:48.030768Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:48.030951Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:48.031005Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateKesus, at tablet72057594046644480 2024-11-21T23:07:48.032977Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:48.033047Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:48.033064Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:48.033290Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:48.033305Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:48.033324Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:48.034272Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:48.057588Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:48.057697Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:48.138462Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:48.145441Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:48.158231Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230468199, transactions count in step: 1, at schemeshard: 
72057594046644480 2024-11-21T23:07:48.158284Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateKesus TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T23:07:48.158415Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:07:48.170003Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:48.170305Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:48.170387Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:48.170506Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:07:48.170562Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:48.170780Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T23:07:48.173074Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:48.173116Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:48.173134Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:48.173370Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:48.173416Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:48.173430Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:48.173473Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbScripting::BasicV1 [GOOD] Test command err: 2024-11-21T23:07:16.207491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873073182877626:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:16.207623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001de6/r3tmp/tmpzl8Ei0/pdisk_1.dat 2024-11-21T23:07:16.719879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:16.722552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:16.722966Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:16.730705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12760, node 1 2024-11-21T23:07:16.826825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:16.826841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:16.826855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:16.827080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28395 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:17.147722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:17.153923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:17.153997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:17.169914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:17.170118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:17.170130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:17.174067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:17.174946Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:17.174973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:17.178890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:17.191423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230437231, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:17.191483Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:17.191798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:17.194030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:17.194198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:17.194244Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:17.194320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:17.194357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:17.194427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:17.197847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:17.197888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:17.197906Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:17.197986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:19.644952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ListingObjects, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:19.645798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:19.647364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:19.647402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:19.652954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ListingObjects 
2024-11-21T23:07:19.653179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:19.653387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:19.653455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:19.654989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:19.655056Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:19.655073Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:19.655265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:19.655286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:19.655295Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:19.659365Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:19.661451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.661752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.661953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.662108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.662267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.662410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.673380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.675135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.675264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.675355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.675506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 
72057594046644480 2024-11-21T23:07:19.675622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.675712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.675800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.675910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.676033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.676134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.676231Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:19.676364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreat ... :TPropose operationId#281474976715659:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:50.669957Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.670789Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.670829Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:07:50.671155Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.671180Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.671194Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T23:07:50.671324Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.671360Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.671374Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T23:07:50.671501Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.671520Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.671534Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T23:07:50.671642Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.671661Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.671674Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T23:07:50.674898Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T23:07:50.683424Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230470726, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:50.683485Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230470726, at schemeshard: 72057594046644480 2024-11-21T23:07:50.683620Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T23:07:50.683772Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230470726, at schemeshard: 72057594046644480 2024-11-21T23:07:50.683838Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:1 128 -> 240 2024-11-21T23:07:50.683903Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230470726, at schemeshard: 72057594046644480 2024-11-21T23:07:50.683946Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:2 128 -> 240 2024-11-21T23:07:50.684021Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976715659:3, HandleReply TEvOperationPlan: step# 1732230470726 2024-11-21T23:07:50.684075Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:3 128 -> 240 2024-11-21T23:07:50.687395Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:50.688053Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:50.688137Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:3 ProgressState 2024-11-21T23:07:50.688247Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:3 progress is 1/4 2024-11-21T23:07:50.688477Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:1 ProgressState 2024-11-21T23:07:50.688547Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:1 progress is 2/4 2024-11-21T23:07:50.688641Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T23:07:50.688697Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 3/4 2024-11-21T23:07:50.688778Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:2 ProgressState 2024-11-21T23:07:50.688818Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#281474976715659:2 progress is 4/4 2024-11-21T23:07:50.688848Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T23:07:50.688885Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:1 2024-11-21T23:07:50.688900Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:2 2024-11-21T23:07:50.688911Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:3 2024-11-21T23:07:50.688932Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 5, subscribers: 1 2024-11-21T23:07:50.694336Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.694417Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.694438Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:07:50.694721Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.694743Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.694753Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:07:50.694870Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.694890Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.694903Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:07:50.695016Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.695038Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.695050Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T23:07:50.695156Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:07:50.695174Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:07:50.695186Z node 13 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T23:07:50.695226Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 2024-11-21T23:07:50.702643Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7439873218869191027:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:07:50.785952Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976715660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:07:50.786189Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:07:50.790750Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:07:50.947751Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fpjgp3f6psba0v7sm8fyp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTk4YWEwZDUtMzNhM2RhMDEtMzdkYjJhYTUtNWQyZTY0OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:51.101706Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fpq9w98rdb3g6cg0entyc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2Y5ODc1NTQtMzE3NTlhYTctNzY3M2Y0YjMtM2ZlNjE3OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:07:51.112616Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230471132, txId: 281474976715662] shutting down >> TTableProfileTests::UseTableProfilePreset [GOOD] >> TTableProfileTests::UseTableProfilePresetViaSdk ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T23:07:52.198980Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.199023Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.199054Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.199714Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.200344Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:52.213154Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.213798Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T23:07:52.214758Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:52.215258Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:52.215438Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T23:07:52.215570Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:52.215699Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:52.215729Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T23:07:52.215767Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:07:52.215790Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:07:52.217231Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.217261Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.217280Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.217585Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.217965Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:52.218087Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.218284Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T23:07:52.219213Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:52.219449Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:07:52.219761Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:07:52.220004Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:07:52.220120Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:52.220156Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:52.220193Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:52.220326Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T23:07:52.220372Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:52.220391Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:07:52.220408Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:52.220511Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T23:07:52.220614Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:07:52.220634Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T23:07:52.220670Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:52.220747Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2024-11-21T23:07:52.220769Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:07:52.220785Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T23:07:52.220804Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:52.220898Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T23:07:52.222159Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.222183Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.222203Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:52.222512Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:52.222855Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:52.223274Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:52.223510Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T23:07:52.224465Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:52.224689Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:07:52.225893Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:07:52.226103Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:07:52.226206Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:52.226251Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:52.226374Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2024-11-21T23:07:52.226434Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:52.226452Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:52.226539Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2024-11-21T23:07:52.226568Z :DEBUG: Take Data. Partition 1. 
Read: {0, 2} (3-3) 2024-11-21T23:07:52.226586Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:52.226669Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2024-11-21T23:07:52.226694Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:07:52.226711Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataRecei ... uster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:53.623238Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2024-11-21T23:07:53.815708Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:07:53.815746Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:07:53.815912Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:53.826672Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:53.833336Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:53.833587Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:07:53.834123Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T23:07:53.963765Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T23:07:53.963976Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:53.964012Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:53.964032Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:53.964048Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T23:07:53.964067Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T23:07:53.964084Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T23:07:53.964099Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T23:07:53.964117Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T23:07:53.964139Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2024-11-21T23:07:53.964155Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T23:07:53.964217Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 10, size 10000000 bytes 2024-11-21T23:07:53.964410Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:53.966612Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2024-11-21T23:07:53.975826Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:53.975862Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:53.975883Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:53.976180Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:53.988698Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:53.988904Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:53.990625Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:53.991096Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T23:07:53.992896Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:53.992923Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:53.992947Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:54.002659Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:54.004275Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:54.004483Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.005067Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.005307Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:54.005418Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:54.005464Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:07:54.005608Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2024-11-21T23:07:22.254825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873100139796976:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:22.258443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd9/r3tmp/tmpGcNvCQ/pdisk_1.dat 2024-11-21T23:07:22.997005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:22.997092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:23.006360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:23.030648Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1112, node 1 2024-11-21T23:07:23.211204Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:23.266166Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:23.326971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:23.354295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:23.358862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:23.358895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:23.358942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:07:23.358992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:07:23.448093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:23.448112Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:23.448118Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:23.448202Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16499 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:23.915780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:23.951563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:23.951639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:23.954990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:23.955186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:23.955198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:23.962710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:23.963372Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:23.963390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:23.973537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:23.991212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230444028, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:23.991246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:23.991479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:23.997299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:23.997450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:23.997497Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:23.997565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:23.997605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:23.997651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:24.003568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:24.003618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:24.003632Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:24.003724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:28.517485Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873125740632934:2057];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:28.517972Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd9/r3tmp/tmpciJosr/pdisk_1.dat 2024-11-21T23:07:28.715258Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:28.748084Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:28.748160Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:28.753459Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11322, node 4 2024-11-21T23:07:28.985584Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:28.985605Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:28.985612Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:28.985707Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:29.287532Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:29.287853Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:29.287884Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:29.289651Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:29.289790Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:29.289805Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:29.291147Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:29.291169Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:29.292305Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:29.294990Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230449341, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:29.295016Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:29.295232Z ... 
tion: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:07:51.893946Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873224503940121:3530] txid# 281474976715672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715672 TabletId# 72057594046644480} 2024-11-21T23:07:51.894071Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873224503940121:3530] txid# 281474976715672 HANDLE EvClientConnected 2024-11-21T23:07:51.894259Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:07:51.894382Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:51.907129Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2024-11-21T23:07:51.908091Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873224503940121:3530] txid# 281474976715672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715672} 2024-11-21T23:07:51.908128Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873224503940121:3530] txid# 281474976715672 SEND to# [10:7439873224503940120:2362] Source {TEvProposeTransactionStatus txid# 281474976715672 Status# 53} 2024-11-21T23:07:51.910277Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:07:51.910385Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:07:51.910445Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:07:51.910511Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:07:51.944476Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230471979, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:51.959549Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715672, done: 0, blocked: 1 2024-11-21T23:07:51.969401Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715672:0 2024-11-21T23:07:51.969480Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715672, publications: 2, subscribers: 1 2024-11-21T23:07:51.971782Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:07:51.971869Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:07:51.971878Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:07:51.971919Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:07:51.972869Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715672, subscribers: 1 2024-11-21T23:07:52.004202Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037890 not found 2024-11-21T23:07:52.004429Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T23:07:52.007697Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 01jd8fpra75ygz0yfsdg3cxmvz, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# 
AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58718, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2024-11-21T23:07:52.008013Z node 10 :TX_PROXY DEBUG: actor# [10:7439873194439167055:2111] Handle TEvProposeTransaction 2024-11-21T23:07:52.008030Z node 10 :TX_PROXY DEBUG: actor# [10:7439873194439167055:2111] TxId# 281474976715673 ProcessProposeTransaction 2024-11-21T23:07:52.008061Z node 10 :TX_PROXY DEBUG: actor# [10:7439873194439167055:2111] Cookie# 0 userReqId# "" txid# 281474976715673 SEND to# [10:7439873228798907504:3608] 2024-11-21T23:07:52.010235Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873228798907504:3608] txid# 281474976715673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:58718" 2024-11-21T23:07:52.010294Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873228798907504:3608] txid# 281474976715673 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:07:52.011606Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873228798907504:3608] txid# 281474976715673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:07:52.011699Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873228798907504:3608] HANDLE EvNavigateKeySetResult, txid# 281474976715673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:07:52.011735Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873228798907504:3608] txid# 281474976715673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2024-11-21T23:07:52.012052Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873228798907504:3608] txid# 281474976715673 HANDLE EvClientConnected 2024-11-21T23:07:52.012237Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:07:52.012498Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:52.015335Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2024-11-21T23:07:52.016057Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873228798907504:3608] txid# 281474976715673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715673} 2024-11-21T23:07:52.016090Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873228798907504:3608] txid# 281474976715673 SEND to# [10:7439873228798907503:2366] Source {TEvProposeTransactionStatus txid# 281474976715673 Status# 53} 2024-11-21T23:07:52.019267Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:07:52.019367Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:07:52.019379Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:07:52.019421Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:07:52.032201Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230472077, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T23:07:52.043344Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715673, done: 0, blocked: 1 2024-11-21T23:07:52.049158Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:07:52.049261Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:07:52.049270Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:07:52.049306Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:07:52.052975Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2024-11-21T23:07:52.063866Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b1c80] received request Name# Request ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.064171Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b1680] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.064352Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e3a80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.064542Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b1080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.064710Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e3480] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.064888Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b2280] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.065214Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b2880] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.065423Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e2e80] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.065577Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e2880] received request Name# FillNode ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.065722Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b5280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.065891Z node 10 :GRPC_SERVER DEBUG: [0x51a000093c80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.066054Z node 10 :GRPC_SERVER DEBUG: [0x51a00000fc80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.066214Z node 10 :GRPC_SERVER DEBUG: [0x51a000092a80] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.066361Z node 10 :GRPC_SERVER DEBUG: [0x51b0000b2180] received request Name# KeyValue ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.066545Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b4680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.066698Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b4080] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.066850Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b3a80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.067047Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b3480] received request Name# SqsRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.067250Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b2e80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 
2024-11-21T23:07:52.067394Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e4c80] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.067548Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e4680] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.067685Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e4080] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.067848Z node 10 :GRPC_SERVER DEBUG: [0x51a000023480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.067999Z node 10 :GRPC_SERVER DEBUG: [0x51a0000a0880] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2024-11-21T23:07:52.095181Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2024-11-21T23:07:52.098962Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> Compression::WriteRAW >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbYqlClient::AlterTableAddIndex >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2024-11-21T23:07:07.427726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873034897032937:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:07.427958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e0b/r3tmp/tmpDgzLlH/pdisk_1.dat 2024-11-21T23:07:07.917874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:07.918005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:07.922613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:07.938232Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29077, node 1 2024-11-21T23:07:07.963055Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:07.963078Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:07.963870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2024-11-21T23:07:07.964476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:07.964617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:07:07.978985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.980063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:07.980094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:07.980144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:07:07.980200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:07:08.057947Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:08.057973Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:08.057980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:08.058071Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
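The TClient::Ls exchange just above ("WaitRootIsUp 'Root'... TClient::Ls request: Root ... WaitRootIsUp 'Root' success.") is the test framework's own scheme describe of the Root entry. For orientation, here is a minimal sketch of the closest equivalent through the public C++ SDK. The endpoint and database are placeholders, and the NYdb::NScheme names used (TSchemeClient, DescribePath, TSchemeEntry) are assumptions about the SDK rather than something taken from this log.

```cpp
// Illustrative only: a public-SDK describe of the same "Root" entry that the
// trace above inspects via TClient::Ls. Endpoint/database are placeholders and
// the NYdb::NScheme API names are assumed, not quoted from this report.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

#include <util/stream/output.h>

int main() {
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("localhost:2136")   // placeholder; the tests bind a random GrpcPort
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NScheme::TSchemeClient scheme(driver);

    // Mirrors "TClient::Ls request: Root" from the trace.
    auto result = scheme.DescribePath("/Root").GetValueSync();
    if (result.IsSuccess()) {
        const auto& entry = result.GetEntry();
        Cout << "Name: " << entry.Name << ", Owner: " << entry.Owner << Endl;
    } else {
        Cerr << "DescribePath failed: " << result.GetIssues().ToString() << Endl;
    }

    driver.Stop(true);
    return result.IsSuccess() ? 0 : 1;
}
```

In the trace, the same information appears in the PathDescription block (Name: "Root", Owner: "root@builtin"); the server-side continuation of this test follows below.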
2024-11-21T23:07:08.556265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:08.562961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:08.563013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:08.567232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:08.567607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:08.567628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:07:08.572336Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:08.572365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:08.574168Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:08.576878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:08.577901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230428621, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:08.577942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:08.578221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:08.580612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:08.580784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:08.580827Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:08.580912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:08.580953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:08.580993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:08.584517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:08.584568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:08.584587Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:08.584675Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:08.753044Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:59922) has now valid token of root@builtin 2024-11-21T23:07:08.837022Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T23:07:12.277429Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873056144540556:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:12.277473Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e0b/r3tmp/tmpt0I78Y/pdisk_1.dat 2024-11-21T23:07:12.456047Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:12.484252Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:12.484341Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:12.487507Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25753, node 4 2024-11-21T23:07:12.577266Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:12.577288Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:12.577295Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:12.577398Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:12.811268Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:12.811617Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:12.811638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:07:12.815050Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:12.815250Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:12.815265Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T23:07:12.817232Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:12.818015Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07 ... PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:41.143210Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.143716Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:41.143743Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.151774Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:41.151982Z node 19 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:41.152001Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T23:07:41.154699Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:41.154731Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:41.156681Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.161791Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230461206, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:41.161846Z node 19 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:41.162222Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:41.165948Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:41.166208Z node 19 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:41.166286Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:41.166461Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:07:41.166520Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:07:41.166591Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2024-11-21T23:07:41.168677Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:07:41.168734Z node 19 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:07:41.168764Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:41.168867Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 2024-11-21T23:07:41.171553Z node 19 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1732237660284847 Nodes { NodeId: 1024 Host: "localhost" Port: 24727 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1732237660284847 } Nodes { NodeId: 19 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 20 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 21 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 
2024-11-21T23:07:46.174554Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7439873201559595681:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:46.174611Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e0b/r3tmp/tmpPFnMf3/pdisk_1.dat 2024-11-21T23:07:46.356596Z node 22 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:46.398257Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:46.398369Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:46.409267Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15761, node 22 2024-11-21T23:07:46.520640Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:46.520664Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:46.520673Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:46.520784Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:46.836791Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:46.837169Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:46.837200Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:46.841470Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:46.841675Z node 22 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:46.841698Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
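Most server-side entries in this report share one header shape: an ISO-8601 timestamp, "node <id>", then ":COMPONENT SEVERITY:" and a free-form message, for example "2024-11-21T23:07:46.846302Z node 22 :FLAT_TX_SCHEMESHARD INFO: ...". Because the report wraps several entries onto one physical line, a small stand-alone helper (not part of the YDB test suite) can make triage easier by pulling every header occurrence out of each line; a sketch with std::regex follows.

```cpp
// Stand-alone triage helper for this report (not part of the YDB test suite).
// Server-side entries share the header shape
//   2024-11-21T23:07:46.846302Z node 22 :FLAT_TX_SCHEMESHARD INFO: <message...>
// Several entries may be wrapped onto one physical line, so every header
// occurrence on a line is matched; the message text runs up to the next header.
#include <iostream>
#include <regex>
#include <string>

int main() {
    // Captures: timestamp, node id, component, severity.
    static const std::regex header(
        R"((\d{4}-\d{2}-\d{2}T[0-9:.]+Z) node (\d+) :(\w+) (TRACE|DEBUG|INFO|NOTICE|WARN|ERROR|CRIT):)");

    std::string line;
    while (std::getline(std::cin, line)) {
        for (std::sregex_iterator it(line.begin(), line.end(), header), end; it != end; ++it) {
            const std::smatch& m = *it;
            const std::string severity = m[4].str();
            if (severity == "WARN" || severity == "ERROR") {
                std::cout << m[1] << " node " << m[2] << " "
                          << m[3] << " " << severity << "\n";
            }
        }
    }
    return 0;
}
```

Run over this report, such a filter surfaces, for example, the repeated NET_CLASSIFIER "got bad distributable configuration" errors and the HIVE "TabletId ... not found" warnings; the SDK-side unit-test entries (":DEBUG: [db] [sessionid] [cluster] ...") have no node/component header and are intentionally skipped.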
2024-11-21T23:07:46.846302Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:46.846332Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:46.847390Z node 22 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:46.855136Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:46.867649Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230466911, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:46.867708Z node 22 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:46.867985Z node 22 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:46.871781Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:46.871971Z node 22 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:46.872029Z node 22 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:46.872173Z node 22 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:46.872222Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:46.872287Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:46.874004Z node 22 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:46.874054Z node 22 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:46.874076Z node 22 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:46.874150Z node 22 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 Trying to register node 2024-11-21T23:07:47.152888Z node 22 :TICKET_PARSER ERROR: Ticket 5EB1F2F66F18BE7CA39A5BE8B8F35712F426F51C: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. 
Client certificate failed verification" } >> TFlatTableExecutorIndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutorIndexLoading::Scan_Groups_BTreeIndex_Empty >> TFlatTableExecutorIndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutorKeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutorMoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutorMoveTableData::TestMoveSnapshotFollower >> TFlatTableExecutorMoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutorReboot::TestSchemeGcAfterReassign >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> TFlatTableExecutorReboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutorRejectProbability::MaxedOutRejectProbability >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T23:07:56.384240Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.384263Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.384285Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.387662Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:56.388304Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.400668Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.403917Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:56.407536Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.407561Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.407581Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.408295Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:07:56.410315Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.410503Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.410838Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:56.411216Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:07:56.412412Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.412442Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.412462Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.426637Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:56.438708Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.438898Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.442631Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:56.443408Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.443705Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:56.443789Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:56.443834Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:07:56.445054Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.445075Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.445099Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.448639Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:56.449183Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.449356Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.450570Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T23:07:56.451498Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:56.451675Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:07:56.454185Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:07:56.454458Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:07:56.454571Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:56.455487Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:56.455528Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:56.455678Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2024-11-21T23:07:56.455721Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:56.455744Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:07:56.455766Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:56.455869Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2024-11-21T23:07:56.455950Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:07:56.455971Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T23:07:56.455986Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:07:56.456072Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). 
Partition stream id: 1 Getting new event 2024-11-21T23:07:56.456104Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:07:56.456123Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T23:07:56.456143Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:56.456230Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2024-11-21T23:07:56.457667Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.457696Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.457732Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.457996Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:56.458383Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.458527Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.458760Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T23:07:56.459697Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:56.459921Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:07:56.460185Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:07:56.460381Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:07:56.460500Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:56.460542Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:56.460568Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:56.460582Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:07:56.460617Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:56.460820Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2024-11-21T23:07:56.460909Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:07:56.460923Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T23:07:56.460933Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:07:56.460943Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T23:07:56.460957Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:07:56.461051Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). 
Partition stream id: 1 2024-11-21T23:07:56.462312Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.462333Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.462350Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.462623Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:56.470812Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.471023Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.471488Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:56.472558Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:56.473292Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:07:56.479061Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T23:07:56.479204Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:56.479354Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:56.479386Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:56.479403Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T23:07:56.479420Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T23:07:56.479461Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T23:07:56.479481Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T23:07:56.479654Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:07:56.479837Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> TFlatTableExecutorRejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutorRejectProbability::SomeRejectProbability >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2024-11-21T23:06:45.511026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872941094095237:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:45.511301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002371/r3tmp/tmpJWXBOq/pdisk_1.dat 2024-11-21T23:06:46.395813Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:46.409820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:46.409947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:46.419817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24165, node 1 2024-11-21T23:06:46.666942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:46.666962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:46.666969Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:46.667057Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:47.201728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
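The ReadSessionImplTest trace above exercises the C++ persqueue read session: the session reconnects, confirms partition stream creation, decompresses batches, hands DataReceived events to the application, and commits offset ranges such as [1, 3) and [10, 12), which the mock server acknowledges with "Got commit req { cookies ... }". A rough application-side loop over the same SDK might look as follows; topic, consumer, and endpoint are placeholders, and the NYdb::NPersQueue type and method names (TReadSessionSettings, CreateReadSession, TDataReceivedEvent, Commit) are assumptions drawn from the public SDK rather than from this log, so treat this as a sketch only.

```cpp
// Rough application-side counterpart of the read-session trace above:
// receive data events, print the offsets, and commit them. Topic, consumer
// and endpoint are placeholders; the NYdb::NPersQueue API names used here
// are assumed from the public C++ SDK and may differ between SDK versions.
#include <ydb/public/sdk/cpp/client/ydb_persqueue_core/persqueue.h>

#include <util/stream/output.h>

int main() {
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("localhost:2135")      // placeholder
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NPersQueue::TPersQueueClient client(driver);

    auto settings = NYdb::NPersQueue::TReadSessionSettings()
        .ConsumerName("test-consumer")       // placeholder
        .AppendTopics("TestTopic");          // topic name as seen in the trace

    auto session = client.CreateReadSession(settings);

    // Handle a bounded number of events for the sketch; a real reader loops forever.
    for (int i = 0; i < 100; ++i) {
        auto event = session->GetEvent(/*block=*/true);
        if (!event) {
            break;
        }
        using namespace NYdb::NPersQueue;
        if (auto* data = std::get_if<TReadSessionEvent::TDataReceivedEvent>(&*event)) {
            for (const auto& message : data->GetMessages()) {
                Cout << "offset " << message.GetOffset() << Endl;
            }
            data->Commit();     // corresponds to "Commit offsets [x, y)" in the trace
        } else if (auto* create = std::get_if<TReadSessionEvent::TCreatePartitionStreamEvent>(&*event)) {
            create->Confirm();  // "Confirm partition stream create"
        } else if (auto* destroy = std::get_if<TReadSessionEvent::TDestroyPartitionStreamEvent>(&*event)) {
            destroy->Confirm(); // "Confirm partition stream destroy"
        } else if (std::get_if<TSessionClosedEvent>(&*event)) {
            break;
        }
    }

    session->Close(TDuration::Seconds(5));
    driver.Stop(true);
    return 0;
}
```

The KqpWorkloadService trace that continues below is unrelated to this reader; it covers resource-pool creation and admission rather than topic reads.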
2024-11-21T23:06:47.223229Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:06:47.311471Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:50.088846Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:50.089013Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:50.089035Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:50.097101Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDRmMWZkZGEtYmZkZDEyMTQtN2NiYzdiNGMtZTViZDM4Y2I=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDRmMWZkZGEtYmZkZDEyMTQtN2NiYzdiNGMtZTViZDM4Y2I= 2024-11-21T23:06:50.097585Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:06:50.116622Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872962568932204:2302], Start check tables existence, number paths: 2 2024-11-21T23:06:50.116817Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDRmMWZkZGEtYmZkZDEyMTQtN2NiYzdiNGMtZTViZDM4Y2I=, ActorId: [1:7439872962568932206:2303], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:50.120324Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872962568932204:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:50.120393Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872962568932204:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:50.120435Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872962568932204:2302], Successfully finished 2024-11-21T23:06:50.120497Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:50.161740Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872962568932223:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:50.167731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:50.170641Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872962568932223:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2024-11-21T23:06:50.172728Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872962568932223:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T23:06:50.186772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872962568932223:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:06:50.248988Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872962568932223:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:50.253148Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872962568932223:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:06:50.255922Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE= 2024-11-21T23:06:50.256255Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:06:50.256270Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:06:50.256338Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE=, ActorId: [1:7439872962568932282:2304], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:50.256468Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE=, ActorId: [1:7439872962568932282:2304], ActorState: ReadyState, TraceId: 01jd8fmw0g5vz58yr1r7mz9kes, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439872962568932281:2338] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T23:06:50.256515Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872962568932284:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:50.256591Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439872962568932282:2304], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE= 2024-11-21T23:06:50.256636Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872962568932285:2306], Database: /Root, Start database fetching 2024-11-21T23:06:50.258540Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872962568932285:2306], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T23:06:50.258735Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872962568932284:2305], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:50.258783Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T23:06:50.258821Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T23:06:50.258837Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T23:06:50.259127Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872962568932296:2308], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: 
[OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T23:06:50.259210Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439872962568932295:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE=, Start pool fetching 2024-11-21T23:06:50.259239Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872962568932297:2309], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:50.261143Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872962568932297:2309], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:50.261210Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872962568932296:2308], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T23:06:50.261312Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439872962568932295:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE=, Pool info successfully resolved 2024-11-21T23:06:50.261380Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE= 2024-11-21T23:06:50.261462Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872962568932296:2308], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7439872962568932282:2304], session id: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE= 2024-11-21T23:06:50.261517Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YWMzODRiZjktOWUzN2U4NGEtMzlmY2NjNjQtZjIwMTI2YjE= 2024-11-21T23:06:50.261571Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables reques ... 
3:07:53.529890Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:54.082597Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:07:54.085985Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTViN2YxYTgtNmEzZGQ3NmQtNWI5OGZlYTgtMmRhNTQ0ODc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTViN2YxYTgtNmEzZGQ3NmQtNWI5OGZlYTgtMmRhNTQ0ODc= 2024-11-21T23:07:54.087196Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439873235775298199:2302], Start check tables existence, number paths: 2 2024-11-21T23:07:54.087297Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTViN2YxYTgtNmEzZGQ3NmQtNWI5OGZlYTgtMmRhNTQ0ODc=, ActorId: [6:7439873235775298200:2303], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:07:54.088965Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:07:54.088994Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:07:54.089023Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:07:54.102139Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439873235775298199:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:07:54.103184Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439873235775298199:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:07:54.103257Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7439873235775298199:2302], Successfully finished 2024-11-21T23:07:54.103361Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:07:54.108903Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439873235775298226:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:07:54.113669Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:07:54.115224Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439873235775298226:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2024-11-21T23:07:54.115443Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439873235775298226:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T23:07:54.128602Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439873235775298226:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:07:54.208981Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439873235775298226:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:07:54.212197Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439873235775298226:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:07:54.221310Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE= 2024-11-21T23:07:54.221692Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:07:54.221711Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:07:54.221780Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: [6:7439873235775298284:2305], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:07:54.221935Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: [6:7439873235775298284:2305], ActorState: ReadyState, TraceId: 01jd8fptfd4x55bxzqc5sjs8q6, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7439873235775298283:2337] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T23:07:54.222003Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [6:7439873235775298284:2305], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE= 2024-11-21T23:07:54.222070Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439873235775298286:2306], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:07:54.222163Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439873235775298287:2307], Database: /Root, Start database fetching 2024-11-21T23:07:54.226076Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439873235775298286:2306], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:07:54.226156Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T23:07:54.226177Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T23:07:54.226383Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439873235775298297:2308], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T23:07:54.226455Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439873235775298287:2307], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T23:07:54.226508Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched 
database info, DatabaseId: /Root, Serverless: 0 2024-11-21T23:07:54.226556Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7439873235775298298:2309], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, Start pool fetching 2024-11-21T23:07:54.226593Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439873235775298299:2310], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:07:54.232023Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439873235775298299:2310], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:07:54.232135Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439873235775298297:2308], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T23:07:54.232269Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7439873235775298298:2309], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, Pool info successfully resolved 2024-11-21T23:07:54.232380Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE= 2024-11-21T23:07:54.232474Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE= 2024-11-21T23:07:54.232582Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: [6:7439873235775298284:2305], ActorState: ExecuteState, TraceId: 01jd8fptfd4x55bxzqc5sjs8q6, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2024-11-21T23:07:54.232736Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: [6:7439873235775298284:2305], ActorState: ExecuteState, TraceId: 01jd8fptfd4x55bxzqc5sjs8q6, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2024-11-21T23:07:54.232989Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7439873235775298284:2305], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE= 2024-11-21T23:07:54.233039Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: [6:7439873235775298284:2305], ActorState: CleanupState, TraceId: 01jd8fptfd4x55bxzqc5sjs8q6, EndCleanup, isFinal: 1 2024-11-21T23:07:54.233162Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: [6:7439873235775298284:2305], ActorState: CleanupState, TraceId: 01jd8fptfd4x55bxzqc5sjs8q6, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7439873210005494016:2256] 2024-11-21T23:07:54.233193Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: 
[6:7439873235775298284:2305], ActorState: unknown state, TraceId: 01jd8fptfd4x55bxzqc5sjs8q6, Cleanup temp tables: 0 2024-11-21T23:07:54.233320Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ODdmOTIyOWEtMjUyOWE5ZTItYWE3ZThmOTUtM2IzYTQ3MTE=, ActorId: [6:7439873235775298284:2305], ActorState: unknown state, TraceId: 01jd8fptfd4x55bxzqc5sjs8q6, Session actor destroyed 2024-11-21T23:07:54.276002Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTViN2YxYTgtNmEzZGQ3NmQtNWI5OGZlYTgtMmRhNTQ0ODc=, ActorId: [6:7439873235775298200:2303], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:07:54.276055Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTViN2YxYTgtNmEzZGQ3NmQtNWI5OGZlYTgtMmRhNTQ0ODc=, ActorId: [6:7439873235775298200:2303], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:07:54.276083Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTViN2YxYTgtNmEzZGQ3NmQtNWI5OGZlYTgtMmRhNTQ0ODc=, ActorId: [6:7439873235775298200:2303], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:07:54.276114Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTViN2YxYTgtNmEzZGQ3NmQtNWI5OGZlYTgtMmRhNTQ0ODc=, ActorId: [6:7439873235775298200:2303], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:07:54.276198Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTViN2YxYTgtNmEzZGQ3NmQtNWI5OGZlYTgtMmRhNTQ0ODc=, ActorId: [6:7439873235775298200:2303], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T23:07:56.431800Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.431823Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.431842Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.438785Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:56.442637Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:56.442726Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.450379Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.450422Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.450441Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.450799Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:07:56.453121Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:07:56.453169Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.454168Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.454201Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.454221Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.454539Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:07:56.454591Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.454621Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.455270Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T23:07:56.458385Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.458420Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.458434Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.458757Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:07:56.458784Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.458804Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.458847Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T23:07:56.460051Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:07:56.460084Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:07:56.460106Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.460399Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:56.460971Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.480081Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:07:56.480960Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:56.481288Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T23:07:56.488140Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T23:07:56.488483Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:56.488508Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:07:56.488520Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:07:56.488531Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T23:07:56.488546Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T23:07:56.488556Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T23:07:56.488565Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T23:07:56.488575Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T23:07:56.488656Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T23:07:56.488674Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T23:07:56.488686Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T23:07:56.488696Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T23:07:56.488704Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T23:07:56.488714Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T23:07:56.488722Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T23:07:56.488730Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T23:07:56.488799Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T23:07:56.488809Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T23:07:56.488825Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T23:07:56.488835Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T23:07:56.488857Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T23:07:56.488867Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T23:07:56.488875Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T23:07:56.488883Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T23:07:56.488892Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T23:07:56.488902Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T23:07:56.488915Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T23:07:56.488937Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T23:07:56.488966Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T23:07:56.488982Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T23:07:56.488997Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T23:07:56.489013Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T23:07:56.489059Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T23:07:56.489087Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T23:07:56.489117Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T23:07:56.489145Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T23:07:56.489173Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T23:07:56.489187Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T23:07:56.489200Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T23:07:56.489214Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T23:07:56.489229Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T23:07:56.489244Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T23:07:56.489258Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T23:07:56.489274Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T23:07:56.489288Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T23:07:56.489301Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T23:07:56.489313Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T23:07:56.489326Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T23:07:56.489338Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T23:07:56.489350Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T23:07:56.489410Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T23:07:56.491687Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T23:07:56.491848Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T23:07:56.491876Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T23:07:56.491897Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T23:07:56.491910Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T23:07:56.491932Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T23:07:56.491942Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T23:07:56.491952Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T23:07:56.491962Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T23:07:56.491977Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T23:07:56.491987Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T23:07:56.491996Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T23:07:56.492005Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T23:07:56.492020Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T23:07:56.492029Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T23:07:56.492037Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T23:07:56.492059Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T23:07:56.492087Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T23:07:56.492120Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T23:07:56.492134Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T23:07:56.492149Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T23:07:56.492173Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T23:07:56.492187Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T23:07:56.492199Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T23:07:56.492208Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T23:07:56.492215Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T23:07:56.492223Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T23:07:56.492230Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T23:07:56.492238Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T23:07:56.492252Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T23:07:56.492261Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T23:07:56.492269Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T23:07:56.492282Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T23:07:56.492307Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T23:07:56.492316Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T23:07:56.492322Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T23:07:56.492330Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T23:07:56.492337Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T23:07:56.492344Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T23:07:56.492351Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T23:07:56.492362Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T23:07:56.492382Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T23:07:56.492393Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T23:07:56.492401Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T23:07:56.492410Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T23:07:56.492421Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T23:07:56.492431Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T23:07:56.492481Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T23:07:56.492504Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T23:07:56.492516Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T23:07:56.492524Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T23:07:56.492557Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T23:07:56.492682Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:07:56.493794Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.493812Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.493832Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.494094Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:07:56.494453Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.494615Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.495034Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:56.596114Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.596402Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:56.596454Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:56.596485Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:07:56.596551Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:07:56.802694Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T23:07:56.905805Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:07:56.906802Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:07:56.907742Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:07:56.910512Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.910533Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.910552Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:56.911028Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:56.911524Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:56.911696Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:56.912574Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:57.013559Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:57.014559Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:07:57.014622Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:57.014660Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:07:57.014749Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2024-11-21T23:07:57.014856Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:07:57.015077Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:07:57.015145Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T23:07:57.015237Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TFlatTableExecutorRejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutorRejectProbability::ZeroRejectProbability >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TFlatTableExecutorRejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutorRejectProbability::ZeroRejectProbabilityMultipleTables >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TFlatTableExecutorRejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorSetResourceProfile >> TFlatTableExecutorResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorReuseStaticMemory >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TFlatTableExecutorResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorTxDataLimitExceeded >> TFlatTableExecutorResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorRequestPages >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse >> TFlatTableExecutorResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorRequestMemory >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection >> TNodeBrokerTest::SingleDomainModeBannedIds >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> TFlatTableExecutorResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorRequestMemoryFollower |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> TFlatTableExecutorResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorMemoryLimitExceeded >> TFlatTableExecutorResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorTxDataGC >> TFlatTableExecutorResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutorResourceProfile::TestExecutorTxHoldOnRelease [GOOD] |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} 
data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b (1, aaa) | 1 3 88b (1, b) | 2 6 86b (2, NULL) | 3 9 86b (2, ccx) | 3 11 86b (2, cxz) + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > (1, b) | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > (2, NULL) | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > (2, ccx) | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b (1, aaa) | 1 3 88b (1, baaaa) | 2 6 86b (2, aaa) | 3 9 86b (2, ccx) | 3 11 86b (2, cxz) + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > (1, baaaa) | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > (2, aaa) | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > (2, ccx) | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, ab) | 2 2 42b (1, ac) | 3 3 42b (1, b) | 4 4 42b (1, bb) | 5 5 42b (2, NULL) | 6 6 42b (2, ab) | 7 7 42b (2, ac) | 8 8 42b (2, b) | 9 9 42b (2, bb) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, ab) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, ac) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, b) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bb) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, NULL) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, ab) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, ac) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, b) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bb) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, aba) | 2 2 42b (1, aca) | 3 3 42b (1, baa) | 4 4 42b (1, bba) | 5 5 42b (2, aaa) | 6 6 42b (2, aba) | 7 7 42b (2, aca) | 8 8 42b (2, baa) | 9 9 42b (2, bba) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, aba) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, aca) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, baa) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bba) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, aaa) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, aba) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, aca) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, baa) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bba) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0,1), [1,2), [2,3), [3,4), [4,5), [5,7), [7,8), [8,9), [9,9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 
362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, ab) | 2 2 42b (1, ac) | 3 3 42b (1, b) | 4 4 42b (1, bb) | 5 5 42b (2, NULL) | 6 6 42b (2, ab) | 7 7 42b (2, ac) | 8 8 42b (2, b) | 9 9 42b (2, bb) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, ab) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, ac) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, b) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bb) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, NULL) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, ab) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, ac) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, b) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bb) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b (1, aaa) | 1 1 42b (1, aba) | 2 2 42b (1, aca) | 3 3 42b (1, baa) | 4 4 42b (1, bba) | 5 5 42b (2, aaa) | 6 6 42b (2, aba) | 7 7 42b (2, aca) | 8 8 42b (2, baa) | 9 9 42b (2, bba) | 9 9 42b (2, bba) + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > (1, aba) | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > (1, aca) | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > (1, baa) | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > (1, bba) | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > (2, aaa) | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > (2, aba) | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > (2, aca) | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > (2, baa) | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > (2, bba) | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 41b (ccccccd) | 1 1 41b (ccccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccccd) | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 43b (ccccccd) | 1 1 43b (ccccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccccd) | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 40b (cccccd) | 1 1 40b (cccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccccd) | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 42b (cccccd) | 1 1 42b (cccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | 
PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccccd) | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 39b (ccccd) | 1 1 39b (ccccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccd) | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 41b (ccccd) | 1 1 41b (ccccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (ccccd) | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 38b (cccd) | 1 1 38b (cccd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccd) | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 40b (cccd) | 1 1 40b (cccddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (cccd) | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 35b (d) | 1 1 35b (d) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b (cccccc) | 1 1 37b (d) | 1 1 37b (ddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b () | 1 1 35b (d) | 1 1 35b (d) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > (d) | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b () | 1 1 37b (d) | 1 1 37b (ddd) + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 re ... 
et 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,4), [6,8), [8,12), [14,16), [16,18), [20,28), [32,34), [34,38), [38,39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b (0, 1) | 2 2 66b (0, 4) | 4 4 82b (0, 7) | 8 6 66b (0, 10) | 11 8 66b (1, 3) | 14 10 82b (1, 6) | 20 12 66b (1, 8) | 23 14 66b (2, NULL) | 26 16 82b (2, 4) | 36 18 66b (2, 7) | 39 20 66b (2, 10) | 42 22 82b (3, 3) | 48 24 66b (3, 6) | 53 26 66b (3, 8) | 58 28 82b (4, NULL) | 64 30 66b (4, 4) | 67 32 66b (4, 7) | 70 34 82b (4, 10) | 82 36 66b (5, 3) | 87 38 66b (5, 6) | 87 39 66b (5, 7) + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > (0, 10) | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 58 RowCount: 30 DataSize: 1070 
GroupDataSize: 6739 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,4), [6,8), [8,12), [14,16), [16,18), [20,28), [32,34), [34,38), [38,39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b (0, 1) | 2 2 66b (0, 4) | 4 4 82b (0, 7) | 8 6 66b (0, 10) | 11 8 66b (1, 3) | 14 10 82b (1, 6) | 20 12 66b (1, 8) | 23 14 66b (2, NULL) | 26 16 82b (2, 4) | 36 18 66b (2, 7) | 39 20 66b (2, 10) | 42 22 82b (3, 3) | 48 24 66b (3, 6) | 53 26 66b (3, 8) | 58 28 82b (4, NULL) | 64 30 66b (4, 4) | 67 32 66b (4, 7) | 70 34 82b (4, 10) | 82 36 66b (5, 3) | 87 38 66b (5, 6) | 87 39 66b (5, 7) + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > (0, 4) | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > (0, 7) | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > (0, 10) | 
| + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > (1, 3) | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > (1, 6) | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > (1, 8) | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > (2, NULL) | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > (2, 4) | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > (2, 7) | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > (2, 10) | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > (3, 3) | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > (3, 6) | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > (3, 8) | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > (4, NULL) | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > (4, 4) | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > (4, 7) | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > (4, 10) | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > (5, 3) | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > (5, 6) | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 
1: (4, 1) | ERowOp 1: (4, 3) + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |80.0%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ErasureBrandNew::Block42_restore [GOOD] >> ErasureBrandNew::Block42_restore_benchmark >> PartitionStats::Collector >> PartitionStats::Collector [GOOD] >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::RetryOperationSync |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2024-11-21T23:07:32.987909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873141459337414:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:32.988412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd6/r3tmp/tmpmhMxic/pdisk_1.dat 2024-11-21T23:07:33.526805Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:33.560510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:33.560621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:33.574206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24801, node 1 2024-11-21T23:07:33.902997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:33.903018Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:33.903026Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:33.903121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7109 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:34.365348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.371485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:34.371550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.379500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:34.379709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:34.380397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:34.386284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:34.386316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:34.390543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:34.391940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:34.395751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230454437, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:34.395795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:34.396068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:34.397983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:34.398153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:34.398204Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:34.398289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:34.398336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:34.398417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:34.409036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:34.409115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:34.409137Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:34.409257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:37.460008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873162934174785:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:07:37.460127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:37.986564Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873141459337414:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:37.986633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:38.356768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.357384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:38.357989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:38.358029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.363209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/TestTable 2024-11-21T23:07:38.363457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:38.363686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.363769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:38.365346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:38.365389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:38.365410Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:38.365678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:38.365699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:38.365713Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:38.366122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:38.378359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:38.378494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:38.395285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 
72057594046644480 2024-11-21T23:07:38.498751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:38.498789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:38.498885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:38.501129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:38.505488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230458553, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:38.505564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230458553 2024-11-21T23:07:38.505747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:38.512459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:38.512784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.512843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:38.514945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480 ... HARD INFO: TCreateParts opId# 281474976710658:2 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:58.687158Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:58.689202Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:58.689268Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:58.689292Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:58.695778Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:58.695859Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:58.695882Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:58.696116Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:58.696137Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:58.696154Z node 10 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T23:07:58.696260Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:58.696280Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:58.696291Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2024-11-21T23:07:58.696461Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:58.701251Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:2 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:58.701344Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 2 -> 3 2024-11-21T23:07:58.701619Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:58.701646Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:58.707833Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:2 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:58.708520Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:58.778813Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:2 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:58.778837Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:58.778910Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 3 -> 128 2024-11-21T23:07:58.779207Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:58.779216Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:58.779247Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:58.783799Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:2 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:58.784109Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:58.789438Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230478832, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:58.789482Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230478832 2024-11-21T23:07:58.789591Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:58.789666Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
TCreateTableIndex TPropose operationId#281474976710658:1 HandleReply TEvOperationPlan, step: 1732230478832, at schemeshard: 72057594046644480 2024-11-21T23:07:58.789782Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2024-11-21T23:07:58.789869Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:2 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230478832 2024-11-21T23:07:58.789894Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 128 -> 129 2024-11-21T23:07:58.792057Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:58.792684Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:58.792750Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2024-11-21T23:07:58.792835Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 1/3 2024-11-21T23:07:58.793062Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:2 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:58.793206Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:58.796607Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:58.796649Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:58.796666Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:58.796931Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:58.796951Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:58.796961Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:58.797075Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:58.797091Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:58.797104Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 2 2024-11-21T23:07:58.797228Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:58.797242Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 
281474976710658 2024-11-21T23:07:58.797249Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T23:07:58.802215Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710658 Step: 1732230478832 OrderId: 281474976710658 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1357 } } 2024-11-21T23:07:58.802549Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976710658 Step: 1732230478832 OrderId: 281474976710658 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1075 } } 2024-11-21T23:07:58.803272Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:07:58.803301Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:58.803324Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 129 -> 240 2024-11-21T23:07:58.804887Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:07:58.804911Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T23:07:58.804939Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 129 -> 240 2024-11-21T23:07:58.809397Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:58.809501Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 2/3 2024-11-21T23:07:58.809731Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:2 ProgressState 2024-11-21T23:07:58.809783Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:2 progress is 3/3 2024-11-21T23:07:58.809886Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:58.810044Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2024-11-21T23:07:58.810059Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:2 2024-11-21T23:07:58.907752Z node 10 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable'unknown table >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::ExecuteQueryCache |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2024-11-21T23:08:00.827662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:00.827751Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:00.980833Z node 1 :NODE_BROKER ERROR: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2024-11-21T23:08:01.011716Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2024-11-21T23:08:01.032784Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2024-11-21T23:08:01.389692Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2024-11-21T23:08:01.464210Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs >> YdbYqlClient::CreateTableWithMESettings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorResourceProfile::TestExecutorTxHoldOnRelease [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:37.211475Z 00000.032 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.034 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.035 II| FAKE_ENV: Starting storage for BS group 0 00000.048 II| FAKE_ENV: Starting storage for BS group 1 00000.048 II| FAKE_ENV: Starting storage for BS group 2 00000.048 II| FAKE_ENV: Starting storage for BS group 3 00000.075 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [1:29:2061]) priority=200 resources={1, 0} 00000.075 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:29:2061]) to queue queue_background_compaction 00000.075 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:29:2061]) from queue queue_background_compaction 00000.075 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [1:29:2061]) to queue queue_background_compaction 00000.075 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 50.000000 (insert task gen0-table-101-tablet-1 (1 by [1:29:2061])) 00000.096 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:29:2061]) (release resources {1, 0}) 00000.096 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 50.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:29:2061])) 00000.098 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.098 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.098 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.098 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10} 00000.098 II| FAKE_ENV: DS.1 gone, left {1347b, 10}, put {1347b, 10} 00000.098 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.098 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.098 II| FAKE_ENV: All BS storage groups are stopped 00000.098 II| FAKE_ENV: Model 
stopped, hosted 3 actors, spent 0.000s 00000.098 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:37.332358Z 00000.027 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.027 II| FAKE_ENV: Starting storage for BS group 0 00000.028 II| FAKE_ENV: Starting storage for BS group 1 00000.028 II| FAKE_ENV: Starting storage for BS group 2 00000.028 II| FAKE_ENV: Starting storage for BS group 3 00000.029 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.029 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.043 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.044 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 ...inserting rows 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 4832b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.045 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.046 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ...compacting 00000.058 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Full, forced state None, forced mode Full 00000.059 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.059 II| TABLET_EXECUTOR: Leader{1:2:4} starting compaction 00000.059 II| TABLET_EXECUTOR: Leader{1:2:5} starting Scan{1 on 101, Compact{1.2.4, eph 1}} 00000.059 II| TABLET_EXECUTOR: Leader{1:2:5} started compaction 1 00000.059 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.107 II| TABLET_EXECUTOR: Leader{1:2:5} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 4, product {1 parts epoch 2} done 00000.107 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.108 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 8 for step 4 00000.108 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 3 for step 5 00000.108 DD| TABLET_EXECUTOR: Leader{1:2:6} switch applied on followers, step 5 ...waiting until compacted ...making snapshot 00000.108 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot} queued, type 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot 00000.109 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.109 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot} hope 1 -> done Change{4, redo 64b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.109 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxMakeSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.109 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 2, edge 2/8589934597, generation 0 00000.109 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.109 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{3 on 101, Compact{1.2.7, eph 1}} 00000.109 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 3 00000.109 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 3 generation 0 00000.110 DD| TABLET_EXECUTOR: Leader{1:2:8} cache hit for data request from: [2:48:2084], pageCollection [1:2:4:1:12288:161:0] 00000.113 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 3 on TGenCompactionParams{101: gen 0 epoch 2, 1 parts} step 7, product {1 parts epoch 2} done 00000.113 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 3, generation 0 00000.113 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 6 00000.113 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 8 for step 7 00000.114 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 3 for step 8 00000.114 DD| TABLET_EXECUTOR: Leader{1:2:9} switch applied on followers, step 8 ...waiting for snapshot to complete ...borrowing snapshot 00000.119 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot 00000.119 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.119 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.119 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxBorrowSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.119 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 ...stopping the source tablet 00000.120 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 3377b +(3, 3962b), 9 trc, -3962b acc} ...starting the destination tablet 00000.127 II| TABLET_EXECUTOR: Leader{2:2:0} activating executor 00000.128 II| TABLET_EXECUTOR: LSnap{2:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.128 DD| TABLET_EXECUTOR: Leader{2:2:2} commited cookie 2 for step 1 00000.129 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema 00000.129 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.129 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema} hope 1 -> done Change{2, redo 0b alter 218b annex 0, ~{ } -{ }, 0 gb} 00000.130 DD| TABLET_EXECUTOR: Leader{2:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxInitColdSchema} release 4194304b of static, Memory{0 dyn 0} 00000.130 DD| TABLET_EXECUTOR: Leader{2:2:3} commited cookie 1 for step 2 ...loaning snapshot 00000.135 DD| TABLET_EXECUTOR: Leader{2:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot 00000.135 DD| TABLET_EXECUTOR: Leader{2:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.135 DD| TABLET_EXECUTOR: Leader{2:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.135 DD| TABLET_EXECUTOR: Leader{2:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxLoanSnapshot} release 4194304b of static, Memory{0 dyn 0} 00000.136 DD| TABLET_EXECUTOR: Leader{2:2:4} commited cookie 1 for step 3 ...checking table only has cold parts 00000.136 DD| TABLET_EXECUTOR: Leader{2:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts 00000.136 DD| TABLET_EXECUTOR: Leader{2:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.136 DD| TABLET_EXECUTOR: Leader{2:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.136 DD| TABLET_EXECUTOR: Leader{2:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableCold::TTxCheckOnlyColdParts} release 4194304b of static, Memory{0 dyn 0} ...starting scan 00000.137 II| TABLET_EXECUTOR: Leader{2:2:5} starting Scan{2 on 101, DummyScan} 00000.137 DD| TABLET_EXECUTOR: Leader{0:0:-} sending TEvGet batch 161 bytes, 161 total, blobs: { [1:2:7:1:12288:161:0] } 00000.137 DD| TABLET_EXECUTOR: Leader{2:2:5} commited cookie 8 for step 4 ...restarting tablet, iteration 1 00000.143 II| TABLET_EXECUTOR: Leader{2:2:5} suiciding, Waste{2:0, 256b +(0, 0b), 4 trc, -0b acc} 00000.151 DD| TABLET_EXECUTOR: Leader{2:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [2:2:1:1:28672:35:0] } 00000.151 DD| TABLET_EXECUTOR: Leader{2:3:-} sending TEvGet batch 358 bytes, 358 total, blobs: { [2:2:3:1:36864:38:0], [2:2:2:1:8192:218:0], [2:2:3:1:32768:102:0] } 00000.152 II| TABLET_EXECUTOR: Leader{2:3:0} activating executor 00000.152 II| TABLET_EXECUTOR: LSnap{2:3, on 3:1, 178b, wa ... 
ig MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.011 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.011 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 302b annex 0, ~{ } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} hope 1 -> done Change{2, redo 0b alter 15b annex 0, ~{ } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxSetResourceProfile} release 4194304b of static, Memory{0 dyn 0} 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} took 1024b of static mem, Memory{1024 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 1 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 19456b requested for data (20480b in total) 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release 1024b of static, Memory{0 dyn 0} 00000.016 DD| 
TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release tx data 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} request Res{1 20480b} type small_transaction 00000.016 DD| RESOURCE_BROKER: Submitted new unknown task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061]) priority=5 resources={0, 20480} 00000.016 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.016 DD| RESOURCE_BROKER: Allocate resources {0, 20480} for task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061]) from queue queue_default 00000.016 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.016 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.002384 (insert task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])) 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} acquired dyn mem Res{1 20480b}, Memory{0 dyn 20480} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 2 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 10240b requested for data (30720b in total) 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release tx data 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} captured Res{1 20480b} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} update resource task 1 releasing 0b, Memory{0 dyn 20480} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} request Res{2 40960b} type small_transaction 00000.017 DD| RESOURCE_BROKER: Update task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061]) (priority=5 type=small_transaction resources={0, 20480} resubmit=0) 00000.017 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.017 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.000000 to 0.002384 (insert task Tx{3, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])) 00000.017 DD| RESOURCE_BROKER: Submitted new unknown task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061]) priority=5 resources={0, 40960} 00000.017 EE| RESOURCE_BROKER: Assigning waiting task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.017 DD| RESOURCE_BROKER: Allocate resources {0, 40960} for task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061]) from queue queue_default 00000.017 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])' of unknown type 'small_transaction' to default queue 00000.017 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.002384 to 0.007153 (insert task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])) 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} acquired dyn mem Res{2 40960b}, Memory{0 dyn 61440} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 3 -> retry Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} found attached Res{1 20480b} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} moving tx data from attached Res{1 20480b} to Res{2 ...} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} touch new 0b, 0b lo load (0b in total), 1b requested for data (20481b in total) 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} hope 4 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} release Res{2 61440b}, Memory{0 dyn 0} 00000.017 DD| RESOURCE_BROKER: Update task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061]) (priority=5 type=medium_transaction resources={0, 61440} resubmit=0) 00000.017 EE| RESOURCE_BROKER: Assigning in-fly task 'Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])' of unknown type 'medium_transaction' to default queue 00000.017 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.002384 to 0.009537 (insert task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])) 00000.018 DD| RESOURCE_BROKER: Finish task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061]) (release resources {0, 20480}) 00000.018 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 
0.009537 to 0.007153 (remove task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (1 by [49:29:2061])) 00000.018 DD| RESOURCE_BROKER: Finish task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061]) (release resources {0, 61440}) 00000.018 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_default from 0.007153 to 0.000000 (remove task Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutorResourceProfile::TTxRequestMemory} at tablet 1 (2 by [49:29:2061])) 00000.018 II| TABLET_EXECUTOR: Leader{1:2:4} suiciding, Waste{2:0, 317b +(0, 0b), 3 trc, -0b acc} 00000.019 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.019 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.019 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.019 II| FAKE_ENV: DS.0 gone, left {180b, 3}, put {200b, 4} 00000.019 II| FAKE_ENV: DS.1 gone, left {352b, 3}, put {352b, 3} 00000.019 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.019 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.019 II| FAKE_ENV: All BS storage groups are stopped 00000.020 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.020 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 6 Left 67}, stopped |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::ZeroRows >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> YdbYqlClient::TestConstraintViolation [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] Test command err: 2024-11-21T23:06:42.519555Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872927147280233:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:42.519600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023be/r3tmp/tmpTga5rl/pdisk_1.dat 2024-11-21T23:06:43.279291Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13228, node 1 2024-11-21T23:06:43.479284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:43.479390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:43.480559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:43.566653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:43.566672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:43.566679Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:43.566773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11895 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:44.008396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:44.051139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:06:44.128459Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:47.008542Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:47.011631Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzBlY2E5NWItNzRiNTcxZTQtYjM1YTQwZGUtZTgzYTUzYWU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzBlY2E5NWItNzRiNTcxZTQtYjM1YTQwZGUtZTgzYTUzYWU= 2024-11-21T23:06:47.012648Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:47.012676Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:47.012705Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:06:47.012731Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872948622117102:2301], Start check tables existence, number paths: 2 2024-11-21T23:06:47.019471Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzBlY2E5NWItNzRiNTcxZTQtYjM1YTQwZGUtZTgzYTUzYWU=, ActorId: [1:7439872948622117103:2302], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:47.019819Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872948622117102:2301], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:47.019865Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872948622117102:2301], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:47.019913Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872948622117102:2301], Successfully finished 2024-11-21T23:06:47.019985Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:47.054769Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872948622117129:2300], 
DatabaseId: Root, PoolId: sample_pool_id, Start pool creating
2024-11-21T23:06:47.070790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2024-11-21T23:06:47.073655Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872948622117129:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658
2024-11-21T23:06:47.076539Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872948622117129:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected
2024-11-21T23:06:47.084351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872948622117129:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:06:47.150536Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872948622117129:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:47.154951Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872948622117129:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:06:47.170168Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T23:06:47.170193Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:06:47.170536Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872948622117189:2305], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T23:06:47.171025Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzBlY2E5NWItNzRiNTcxZTQtYjM1YTQwZGUtZTgzYTUzYWU=, ActorId: [1:7439872948622117103:2302], ActorState: ReadyState, TraceId: 01jd8fms005tg78rbamzgq80rp, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T23:06:47.174864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872948622117189:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:47.175006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:47.515419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:06:47.521703Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872927147280233:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:47.521787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:06:47.529223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:06:47.531585Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NzBlY2E5NWItNzRiNTcxZTQtYjM1YTQwZGUtZTgzYTUzYWU=, ActorId: [1:7439872948622117103:2302], ActorState: ExecuteState, TraceId: 01jd8fms005tg78rbamzgq80rp, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7439872948622117198:2302] WorkloadServiceCleanup: 0 2024-11-21T23:06:47.533111Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzBlY2E5NWItNzRiNTcxZTQtYjM1YTQwZGUtZTgzYTUzYWU=, ActorId: [1:7439872948622117103:2302], ActorState: CleanupState, TraceId: 01jd8fms005tg78rbamzgq80rp, EndCleanup, isFinal: 0 2024-11-21T23:06:47.533182Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzBlY2E5NWItNzRiNTcxZTQtYjM1YTQwZGUtZTgzYTUzYWU=, ActorId: [1:7439872948622117103:2302], ActorState: CleanupState, TraceId: 01jd8fms005tg78rbamzgq80rp, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7439872927147280241:2256] 2024-11-21T23:06:47.563549Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTBlNzQ1NmEtZDhhZWJkNzQtNDVhMzRmOGItMTJmNGM0NzE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTBlNzQ1NmEtZDhhZWJkNzQtNDVhMzRmOGItMTJmNGM0NzE= 2024-11-21T23:06:47.563843Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:06:47.563884Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTBlNzQ1NmEtZDhhZWJkNzQtNDVhMzRmOGItMTJmNGM0NzE=, ActorId: [1:7439872948622117228:2308], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:47.564432Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTBlNzQ1NmEtZDhhZWJkNzQtNDVhMzRmOGItMTJmNGM0NzE=, ActorId: [1:7439872948622117228:2308], ActorState: ReadyState, TraceId: 01jd8fmscbbtgm9s5c456ra1ce, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7439872948622117227:2358] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T23:06:47.564494Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872948622117230:2309], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:47.564571Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439872948622117228:2308], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: 
ydb://session/3?node_id=1&id=NTBlNzQ1NmEtZDhhZWJkNzQtNDVhMzRmOGItMTJmNGM0NzE= 2024-11-21T23:06:47.564610Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] Act ... orState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, ExecutePhyTx, tx: 0x0000000000000000 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 1 2024-11-21T23:08:00.199780Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, TExecPhysicalRequest, add DeferredEffect to Transaction, current Transactions.size(): 1 2024-11-21T23:08:00.199817Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, TExecPhysicalRequest, tx has commit locks 2024-11-21T23:08:00.199920Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, Sending to Executer TraceId: 0 8 2024-11-21T23:08:00.200019Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, Created new KQP executer: [7:7439873264896189653:2497] isRollback: 0 2024-11-21T23:08:00.221858Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:08:00.222070Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, txInfo Status: Committed Kind: ReadWrite TotalDuration: 58.983 ServerDuration: 58.779 QueriesCount: 2 2024-11-21T23:08:00.222228Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:00.222309Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:00.222348Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, EndCleanup, isFinal: 0 2024-11-21T23:08:00.222436Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq09168enexqahkry5hev, Sent query response back to proxy, proxyRequestId: 31, proxyId: [7:7439873204766646119:2177] 2024-11-21T23:08:00.223173Z node 7 
:KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, TxId: 2024-11-21T23:08:00.223307Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T23:08:00.224014Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ReadyState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, received request, proxyRequestId: 32 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [7:7439873264896189661:2506] database: /Root databaseId: /Root pool id: default 2024-11-21T23:08:00.224052Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ReadyState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, request placed into pool from cache: default 2024-11-21T23:08:00.224127Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ReadyState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, Sending CompileQuery request 2024-11-21T23:08:00.226172Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, ExecutePhyTx, tx: 0x000050C0002A82D8 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:00.226268Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, Sending to Executer TraceId: 0 8 2024-11-21T23:08:00.226350Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, Created new KQP executer: [7:7439873264896189665:2497] isRollback: 0 
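For readability, the lease-refresh query that the workload service runs in the TRefreshPoolStateQuery::OnLeaseUpdated step above is reproduced here verbatim from the log, with line breaks added only for layout; no statements, identifiers, or parameters are added or changed:

-- TRefreshPoolStateQuery::OnLeaseUpdated (copied from the RunDataQuery line above)
DECLARE $database_id AS Text;
DECLARE $pool_id AS Text;
SELECT COUNT(*) AS delayed_requests
FROM `.metadata/workload_manager/delayed_requests`
WHERE database = $database_id AND pool_id = $pool_id
  AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp())
  AND lease_deadline >= CurrentUtcTimestamp();
SELECT COUNT(*) AS running_requests
FROM `.metadata/workload_manager/running_requests`
WHERE database = $database_id AND pool_id = $pool_id
  AND lease_deadline >= CurrentUtcTimestamp();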
2024-11-21T23:08:00.240613Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T23:08:00.241104Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, ExecutePhyTx, tx: 0x000050C0002A8518 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:00.242174Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:08:00.242326Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, txInfo Status: Committed Kind: ReadOnly TotalDuration: 16.314 ServerDuration: 16.174 QueriesCount: 2 2024-11-21T23:08:00.242477Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:00.242543Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:00.242573Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, EndCleanup, isFinal: 0 2024-11-21T23:08:00.242627Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ExecuteState, TraceId: 01jd8fq0az37q93hayc5yqkvr3, Sent query response back to proxy, proxyRequestId: 32, proxyId: [7:7439873204766646119:2177] 2024-11-21T23:08:00.243248Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, TxId: 2024-11-21T23:08:00.243353Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, TxId: 2024-11-21T23:08:00.243876Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7439873234831417788:2311], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2024-11-21T23:08:00.243917Z node 7 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:00.243952Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:00.243980Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:08:00.244007Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:00.244079Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjczNGQzMjYtYTg5YmE4NDgtYTJkMTE5NmUtZjEwYWEwMmQ=, ActorId: [7:7439873264896189628:2497], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] Test command err: 2024-11-21T23:06:45.393481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872938701031313:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:45.393783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00239d/r3tmp/tmp2AdQOE/pdisk_1.dat 2024-11-21T23:06:46.257630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:46.370205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:46.370304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:46.374937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:46.427628Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63418, node 1 2024-11-21T23:06:46.610924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:46.610941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:46.610948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:46.611035Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30327 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:47.098643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:47.185927Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:49.542572Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:49.542685Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872955880900975:2302], Start check tables existence, number paths: 2 2024-11-21T23:06:49.548859Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmVjNzlkYjktZGExZmE4NTQtOTk4NjE2ZWUtZmRmZjIzNzY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MmVjNzlkYjktZGExZmE4NTQtOTk4NjE2ZWUtZmRmZjIzNzY= 2024-11-21T23:06:49.549377Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmVjNzlkYjktZGExZmE4NTQtOTk4NjE2ZWUtZmRmZjIzNzY=, ActorId: [1:7439872955880900991:2303], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:49.549537Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:49.549557Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:49.549591Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:06:49.549647Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872955880900975:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:49.549707Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872955880900975:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:49.549738Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872955880900975:2302], Successfully finished 2024-11-21T23:06:49.559077Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:49.583197Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872955880900993:2299], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:49.592423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:49.595269Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872955880900993:2299], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2024-11-21T23:06:49.597196Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872955880900993:2299], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T23:06:49.621792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872955880900993:2299], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:06:49.700306Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872955880900993:2299], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:49.712776Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872955880900993:2299], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:06:49.717101Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw 2024-11-21T23:06:49.717396Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw, ActorId: [1:7439872955880901051:2304], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:49.717575Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw, ActorId: [1:7439872955880901051:2304], ActorState: ReadyState, TraceId: 01jd8fmvfn89ga4d8j0tefzg0x, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439872955880901050:2336] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T23:06:49.717620Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:06:49.717631Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:06:49.717680Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439872955880901051:2304], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw 2024-11-21T23:06:49.717725Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872955880901053:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:49.717829Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872955880901054:2306], Database: /Root, Start database fetching 2024-11-21T23:06:49.718783Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872955880901054:2306], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T23:06:49.718850Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T23:06:49.718887Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439872955880901061:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw, Start pool fetching 2024-11-21T23:06:49.718921Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872955880901062:2308], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:49.719727Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872955880901062:2308], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:49.719803Z node 1 :KQP_WORKLOAD_SERVICE 
DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872955880901053:2305], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:49.719824Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439872955880901061:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw, Pool info successfully resolved 2024-11-21T23:06:49.719888Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T23:06:49.719902Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T23:06:49.720113Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw 2024-11-21T23:06:49.720181Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872955880901067:2309], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T23:06:49.720229Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872955880901067:2309], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7439872955880901051:2304], session id: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw 2024-11-21T23:06:49.720269Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872955880901067:2309], DatabaseId: /Root, PoolId: sample_pool_id, Reply continue success to [1:7439872955880901051:2304], session id: ydb://session/3?node_id=1&id=OWY1NGM2YjgtMzM2ZjgzNi01OTEwOTJmLWI4MTg5NzAw, local in flight: 1 2024-11-21T23:06:49.720296Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id= ... 
ECUTE with SUCCESS status 2024-11-21T23:08:00.191826Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=NTM1MjEzZDUtMTJhMTlhZTAtOGQwMDZmY2ItMWQ0MjdlZTU=, ActorId: [5:7439873263761483887:2771], ActorState: ExecuteState, TraceId: 01jd8fq056d84tjk0v2yv6dgm9, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:00.191855Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NTM1MjEzZDUtMTJhMTlhZTAtOGQwMDZmY2ItMWQ0MjdlZTU=, ActorId: [5:7439873263761483887:2771], ActorState: ExecuteState, TraceId: 01jd8fq056d84tjk0v2yv6dgm9, EndCleanup, isFinal: 1 2024-11-21T23:08:00.191909Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NTM1MjEzZDUtMTJhMTlhZTAtOGQwMDZmY2ItMWQ0MjdlZTU=, ActorId: [5:7439873263761483887:2771], ActorState: ExecuteState, TraceId: 01jd8fq056d84tjk0v2yv6dgm9, Sent query response back to proxy, proxyRequestId: 78, proxyId: [5:7439873177862135631:2165] 2024-11-21T23:08:00.191933Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NTM1MjEzZDUtMTJhMTlhZTAtOGQwMDZmY2ItMWQ0MjdlZTU=, ActorId: [5:7439873263761483887:2771], ActorState: unknown state, TraceId: 01jd8fq056d84tjk0v2yv6dgm9, Cleanup temp tables: 0 2024-11-21T23:08:00.192311Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=NTM1MjEzZDUtMTJhMTlhZTAtOGQwMDZmY2ItMWQ0MjdlZTU=, ActorId: [5:7439873263761483887:2771], ActorState: unknown state, TraceId: 01jd8fq056d84tjk0v2yv6dgm9, Session actor destroyed 2024-11-21T23:08:00.452405Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, acquire mvcc snapshot 2024-11-21T23:08:00.468337Z node 5 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, read snapshot result: UNAVAILABLE, step: 1732230480001, tx id: 18446744073709551615 2024-11-21T23:08:00.468430Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, ExecutePhyTx, tx: 0x000050C000269818 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:00.468470Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, Sending to Executer TraceId: 0 8 2024-11-21T23:08:00.468588Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, Created new KQP executer: [5:7439873263761483922:2776] isRollback: 0 2024-11-21T23:08:00.473782Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T23:08:00.473941Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: 
[5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 21.553 QueriesCount: 2 2024-11-21T23:08:00.474098Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:00.474485Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:00.474512Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, EndCleanup, isFinal: 0 2024-11-21T23:08:00.474572Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ExecuteState, TraceId: 01jd8fq09f0fqmf8rxc7etrk6f, Sent query response back to proxy, proxyRequestId: 80, proxyId: [5:7439873177862135631:2165] 2024-11-21T23:08:00.475593Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:00.475661Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2024-11-21T23:08:00.475722Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ReadyState, Created new KQP executer: [5:7439873263761483932:2776] isRollback: 1 2024-11-21T23:08:00.475757Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:00.488501Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: CleanupState, EndCleanup, isFinal: 1 2024-11-21T23:08:00.488540Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:00.488705Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=YTE2YTc5OWYtNGMwZGY0YzktMzRkMmU0MzYtNTRiNjhhOGQ=, ActorId: [5:7439873263761483900:2776], ActorState: unknown state, Session actor destroyed 2024-11-21T23:08:00.512237Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, acquire mvcc snapshot 
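The ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier log above only shows the DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier statement explicitly. As a rough, non-authoritative sketch of the DDL family such a test exercises, a minimal example follows; the WITH parameters (RESOURCE_POOL, MEMBER_NAME, RANK) are assumptions based on general YDB resource pool classifier syntax and are not taken from this log, while the classifier name, pool id, and user come from the log lines above:

-- Hypothetical sketch; only the DROP form appears in the log, the WITH options are assumed
CREATE RESOURCE POOL CLASSIFIER MyResourcePoolClassifier
WITH (RESOURCE_POOL = 'sample_pool_id', MEMBER_NAME = 'user@test');
ALTER RESOURCE POOL CLASSIFIER MyResourcePoolClassifier
SET (RANK = 1);
DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier;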
2024-11-21T23:08:00.519604Z node 5 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, read snapshot result: UNAVAILABLE, step: 1732230480512, tx id: 18446744073709551615 2024-11-21T23:08:00.519677Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, ExecutePhyTx, tx: 0x000050C000324298 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:00.519712Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, Sending to Executer TraceId: 0 8 2024-11-21T23:08:00.519784Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, Created new KQP executer: [5:7439873263761483941:2768] isRollback: 0 2024-11-21T23:08:00.529044Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T23:08:00.529233Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 16.98 QueriesCount: 2 2024-11-21T23:08:00.529368Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:00.529692Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:00.529718Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, EndCleanup, isFinal: 0 2024-11-21T23:08:00.529781Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ExecuteState, TraceId: 01jd8fq04b3gmw6aacqyst100v, Sent query response back to proxy, proxyRequestId: 77, proxyId: [5:7439873177862135631:2165] 2024-11-21T23:08:00.540437Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:00.540537Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: 
[5:7439873263761483879:2768], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2024-11-21T23:08:00.540612Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ReadyState, Created new KQP executer: [5:7439873263761483952:2768] isRollback: 1 2024-11-21T23:08:00.540658Z node 5 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:00.541338Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: CleanupState, EndCleanup, isFinal: 1 2024-11-21T23:08:00.541362Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:00.541495Z node 5 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=5&id=OTQ3MmRkNjgtZGFmMDM5NjktNTUwNDNmZS1iMGUzYzMy, ActorId: [5:7439873263761483879:2768], ActorState: unknown state, Session actor destroyed >> YdbYqlClient::AlterTableAddIndex [GOOD] |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |80.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity >> PartitionStats::CollectorOverload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2024-11-21T23:07:36.162841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873160563755035:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:36.162890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dce/r3tmp/tmpBpvsTm/pdisk_1.dat 2024-11-21T23:07:36.854364Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:36.856703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:36.856804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:36.863536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5625, node 1 2024-11-21T23:07:37.124358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:37.124386Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:37.124393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:37.124471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2425 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:37.527665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:37.548533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:37.548589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:37.555331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:37.555528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:37.555540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:37.558724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:37.558751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:37.560418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:37.563291Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:37.569781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230457615, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:37.569828Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:37.570100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:37.572165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:37.572333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:37.572376Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:37.572455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:37.572491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:37.572525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:37.575429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:37.575476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:37.575489Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:37.575576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:40.453110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:40.454230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:40.454946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:40.454986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:40.459668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:07:40.459895Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:40.460087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:40.460175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:40.462120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:40.462162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:40.462193Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:40.463451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:40.463483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:40.463497Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:40.468471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:40.477421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:40.477728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:40.477897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:40.477930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:40.502967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:40.645838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:40.645865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:40.648505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:40.648528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:40.673772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:40.673798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:40.673832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 
2024-11-21T23:07:40.675756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:40.679205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230460723, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:40.679257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230460723 2024-11-21T23:07:40.679368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:40.694796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:40.695185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:40.695263Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:40.700138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046 ... maybe) 2024-11-21T23:07:56.988928Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:57.303239Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:57.303651Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:57.303677Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:57.310562Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:57.310755Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:57.310768Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:57.315072Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:57.318425Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:57.318461Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:57.322567Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:57.328948Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230477369, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:57.328984Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:57.329244Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:57.335249Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:57.335447Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:57.335515Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:57.335600Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:57.335663Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:57.335722Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:57.337558Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:57.337596Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:57.337614Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:57.337684Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:08:00.441218Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:00.441721Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:08:00.442354Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:00.442378Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:00.457985Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 
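The audit line above records only "operation: CREATE TABLE, path: /Root/Test"; the actual schema used by YdbYqlClient::CreateTableWithMESettings is not visible in this log. Purely as a hypothetical illustration of a statement that would produce such an audit entry and the schemeshard state transitions that follow, a minimal sketch is given below; the column names and types are invented for the example:

-- Hypothetical example only; the real /Root/Test schema is not shown in this log
CREATE TABLE `/Root/Test` (
    Key Uint64,
    Value Utf8,
    PRIMARY KEY (Key)
);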
2024-11-21T23:08:00.458286Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:00.458570Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:00.458651Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:08:00.460726Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:00.460766Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:00.460784Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:08:00.461026Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:00.461043Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:00.461053Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:08:00.464191Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:08:00.483321Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:08:00.483447Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:08:00.495081Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:08:00.639293Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:08:00.639329Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:08:00.639412Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:08:00.647931Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:08:00.693251Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230480708, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:00.693318Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230480708 2024-11-21T23:08:00.693456Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:08:00.719494Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:00.719851Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046644480 2024-11-21T23:08:00.719909Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:08:00.742923Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:00.743056Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:00.743078Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:08:00.743391Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:00.743410Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:00.743421Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:08:00.746355Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710658 Step: 1732230480708 OrderId: 281474976710658 ExecLatency: 0 ProposeLatency: 36 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1460 } } 2024-11-21T23:08:00.758035Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:08:00.758091Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:00.758120Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 129 -> 240 2024-11-21T23:08:00.760777Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:08:00.760914Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:08:00.760994Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |80.1%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |80.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |80.1%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |80.1%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout >> TDynamicNameserverTest::BasicFunctionality >> YdbYqlClient::TestExplicitPartitioning [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndex [GOOD] Test command err: 2024-11-21T23:07:17.581719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873076435919049:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:17.581768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001de2/r3tmp/tmpQfEX1u/pdisk_1.dat 2024-11-21T23:07:17.998024Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:18.004698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:18.004800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:18.014986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 TServer::EnableGrpc on GrpcPort 4390, node 1 2024-11-21T23:07:18.055382Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:18.064324Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:18.091445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:18.146150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:18.153013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:18.153059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:18.153151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:07:18.153215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:07:18.214818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T23:07:18.214836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:18.216035Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:18.218819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:18.550049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:18.565494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:18.565565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:18.571397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:18.571609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:18.571625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:18.579299Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:18.579329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:18.583174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:18.584007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:18.599836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230438638, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:18.599911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:18.600231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:18.603358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:18.603538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:18.603587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:18.603678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:18.603719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:18.603774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:18.615871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:18.615943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:18.615962Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:18.616065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:21.168856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/OlapStore, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:21.169077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:21.169177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/OlapStore/OlapTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T23:07:21.169739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 1 -> 2 2024-11-21T23:07:21.173058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:21.173088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T23:07:21.175269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/OlapStore/OlapTable 2024-11-21T23:07:21.175458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:21.175763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:21.175830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:21.176171Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:21.178721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:21.178868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:21.178897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:21.178916Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:21.179452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:21.179489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:21.179503Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:21.179644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:21.179660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:21.179670Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T23:07:21.183255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:21.183334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 2 -> 3 2024-11-21T23:07:21.185416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:1 ProgressState at tabletId# 72057594046644480 2024-11-21T23:07:21.268923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:1 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:21.268951Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:21.269031Z node 1 :FLAT_TX_SCHEMESHARD IN ... 72057594046644480, txId: 281474976715668 2024-11-21T23:08:02.555801Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 4 2024-11-21T23:08:02.583522Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDropParts operationId#281474976715668:2 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:08:02.583562Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:08:02.583686Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 4 -> 128 2024-11-21T23:08:02.584241Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropIndexAtMainTable TConfigureParts operationId#281474976715668:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:08:02.584262Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:08:02.584323Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 3 -> 128 2024-11-21T23:08:02.587930Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976715668:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:02.588384Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropIndexAtMainTable TPropose operationId#281474976715668:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:02.607421Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230482654, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:02.607505Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropIndexAtMainTable TPropose operationId#281474976715668:0 HandleReply TEvOperationPlan, step: 1732230482654, at schemeshard: 72057594046644480 2024-11-21T23:08:02.607626Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 128 -> 129 2024-11-21T23:08:02.607747Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTableIndex TPropose, operationId: 281474976715668:1 HandleReply TEvOperationPlan, step: 1732230482654, at schemeshard: 72057594046644480 2024-11-21T23:08:02.607804Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 128 -> 136 2024-11-21T23:08:02.607881Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId#281474976715668:2 HandleReply TEvOperationPlan, step: 1732230482654, at schemeshard: 72057594046644480 2024-11-21T23:08:02.607914Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 128 -> 136 2024-11-21T23:08:02.619992Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:02.620466Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:02.620589Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTableIndex TWaitRenamedPathPublication operationId: 281474976715668:1 ProgressState, operation type: TxDropTableIndex, at tablet72057594046644480 2024-11-21T23:08:02.620632Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 136 -> 137 2024-11-21T23:08:02.620989Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:08:02.621170Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication 
operationId: 281474976715668:2 ProgressState, operation type: TxDropTable, at tablet72057594046644480 2024-11-21T23:08:02.621227Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 281474976715668:2 ProgressState, no renaming has been detected for this operation 2024-11-21T23:08:02.621252Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 136 -> 137 2024-11-21T23:08:02.624451Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T23:08:02.624510Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T23:08:02.624538Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2024-11-21T23:08:02.630906Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTableIndex TDeletePathBarrier operationId: 281474976715668:1 ProgressState, operation type: TxDropTableIndex, at tablet72057594046644480 2024-11-21T23:08:02.631271Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715668:2 ProgressState, operation type: TxDropTable, at tablet72057594046644480 2024-11-21T23:08:02.649988Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715668 Step: 1732230482654 OrderId: 281474976715668 ExecLatency: 0 ProposeLatency: 38 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 771 } } 2024-11-21T23:08:02.653123Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715668 Step: 1732230482654 OrderId: 281474976715668 ExecLatency: 0 ProposeLatency: 42 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1346 } } 2024-11-21T23:08:02.663521Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715668:2 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046644480 2024-11-21T23:08:02.666672Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:08:02.666744Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T23:08:02.666796Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 129 -> 240 2024-11-21T23:08:02.670071Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:0 ProgressState 2024-11-21T23:08:02.670224Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 1/3 2024-11-21T23:08:02.670246Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715668, done: 1, blocked: 2 2024-11-21T23:08:02.670354Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTableIndex TDeletePathBarrier operationId: 281474976715668:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: RenamePathBarrier }, at 
tablet72057594046644480 2024-11-21T23:08:02.671221Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 137 -> 240 2024-11-21T23:08:02.671472Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 281474976715668:2 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: RenamePathBarrier }, at tablet72057594046644480 2024-11-21T23:08:02.671553Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 137 -> 129 2024-11-21T23:08:02.674609Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:02.676216Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:02.676309Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:1 ProgressState 2024-11-21T23:08:02.676463Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 2/3 2024-11-21T23:08:02.676693Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:2 ProgressState at tablet: 72057594046644480 2024-11-21T23:08:02.676757Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715668:2, at schemeshard: 72057594046644480 2024-11-21T23:08:02.676784Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:2 129 -> 240 2024-11-21T23:08:02.677519Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T23:08:02.677563Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T23:08:02.677586Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 18446744073709551615 2024-11-21T23:08:02.679891Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T23:08:02.679926Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T23:08:02.679947Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 8 2024-11-21T23:08:02.680223Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T23:08:02.680251Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 2024-11-21T23:08:02.680310Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715668 2024-11-21T23:08:02.680420Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715668 
2024-11-21T23:08:02.680436Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715668, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 18446744073709551615 2024-11-21T23:08:02.684688Z node 10 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715668:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:02.689004Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:2 progress is 3/3 2024-11-21T23:08:02.689360Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2024-11-21T23:08:02.689475Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2024-11-21T23:08:02.689489Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:2 2024-11-21T23:08:02.700571Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2024-11-21T23:08:02.707309Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2024-11-21T23:08:02.709928Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TNodeBrokerTest::TestListNodes >> ErasureBrandNew::Block42_restore_benchmark [GOOD] |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |80.1%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |80.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatTableBackgroundCompactions::TestChangeBackgroundSnapshotToRegular >> TFlatTableBackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableBackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionToRegular >> TCutHistoryRestrictions::BasicTest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableBackgroundCompactions::TestRunBackgroundCompactionGen2 >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD] Test command err: 2024-11-21T23:07:14.121735Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873064146341014:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:14.121895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dee/r3tmp/tmpzxF47I/pdisk_1.dat 2024-11-21T23:07:14.800902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:14.800992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:14.819640Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:14.826675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4535, node 1 2024-11-21T23:07:15.213850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:15.213875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:15.213882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:15.213980Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31992 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:15.505095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:15.509886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:15.509917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:15.511813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:15.511960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:15.511985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:15.513812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:15.517529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:15.517560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:15.522357Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:15.529583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230435572, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:15.529641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:15.529936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:15.531808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:15.531978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:15.532025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:15.532121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:15.532159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:15.532210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:15.539300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:15.539360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:15.539377Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:15.539462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:18.213981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/Foo, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:18.214240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:18.224503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/Foo 2024-11-21T23:07:18.224669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:18.224845Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:18.224910Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose 
operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:18.226633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:18.226740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:18.226776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:18.226792Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:18.227001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:18.227026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:18.227035Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T23:07:18.236786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230438281, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:18.236840Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230438281, at schemeshard: 72057594046644480 2024-11-21T23:07:18.236986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:07:18.243475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:18.243675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:18.243716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:18.243773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:07:18.243804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:18.243845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T23:07:18.244914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:18.244953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:18.244966Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:18.245145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 
2024-11-21T23:07:18.245159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:18.245168Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:18.245196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:07:18.312396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873081326211062:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:18.312511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:18.545576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo/Test, opId: 281474976710659:0, at schemeshard: 72057594046644480 20 ... ESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, ProgressState 2024-11-21T23:08:03.812090Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:03.815348Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.815419Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.815444Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:08:03.815685Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.815706Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.815718Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T23:08:03.815869Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.815888Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.815899Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T23:08:03.816005Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.816029Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.816037Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T23:08:03.816132Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.816146Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.816155Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 1 2024-11-21T23:08:03.825112Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, 
txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:08:03.831687Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230483872, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:03.831745Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230483872, at schemeshard: 72057594046644480 2024-11-21T23:08:03.831876Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:0 128 -> 240 2024-11-21T23:08:03.832011Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230483872, at schemeshard: 72057594046644480 2024-11-21T23:08:03.832065Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:1 128 -> 240 2024-11-21T23:08:03.832135Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710659:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230483872, at schemeshard: 72057594046644480 2024-11-21T23:08:03.832178Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:2 128 -> 240 2024-11-21T23:08:03.832264Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710659:3, HandleReply TEvOperationPlan: step# 1732230483872 2024-11-21T23:08:03.832312Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710659:3 128 -> 240 2024-11-21T23:08:03.840274Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:03.840898Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:03.840999Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:2 ProgressState 2024-11-21T23:08:03.841105Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:2 progress is 1/4 2024-11-21T23:08:03.841300Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:0 ProgressState 2024-11-21T23:08:03.841354Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 2/4 2024-11-21T23:08:03.841448Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:3 ProgressState 2024-11-21T23:08:03.841495Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:3 progress is 3/4 2024-11-21T23:08:03.841581Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710659:1 ProgressState 2024-11-21T23:08:03.841627Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:1 progress is 4/4 2024-11-21T23:08:03.841660Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:08:03.841699Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:1 2024-11-21T23:08:03.841716Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:2 2024-11-21T23:08:03.841729Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:3 2024-11-21T23:08:03.841749Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 5, subscribers: 1 2024-11-21T23:08:03.877838Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.877939Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.877970Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:08:03.878535Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.878617Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.878660Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:08:03.878996Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.889570Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.889611Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:08:03.890368Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.890425Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.890463Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T23:08:03.890649Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:08:03.890668Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:08:03.890680Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 2 2024-11-21T23:08:03.890719Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2024-11-21T23:08:03.914766Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439873275930060320:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:08:04.013227Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:08:04.013433Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:08:04.016428Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:08:04.219175Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jd8fq3t95w5xb2wtvmcgk4f3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTczZmIzNDgtZjEyN2M4OTQtNGQ1ODc0MjEtNTE4MDQ0NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:04.852099Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jd8fq499azq8t0675fwtsq6d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NTczZmIzNDgtZjEyN2M4OTQtNGQ1ODc0MjEtNTE4MDQ0NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> THiveTest::TestDrain >> THiveTest::TestReCreateTablet >> TFlatTableBackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableBackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::OverwriteStoragePolicy |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TFlatTableBackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> TDynamicNameserverTest::BasicFunctionality [GOOD] >> TableCreator::CreateTables >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |80.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbOlapStore::BulkUpsert |80.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TPersQueueTest::Delete [GOOD] >> TPersQueueTest::FetchRequest >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD] Test command err: 2024-11-21T23:08:07.901385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:07.901447Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:08.020448Z node 1 :NODE_BROKER ERROR: Configured lease duration (10.000000s) is too small. Using min. 
value: 300.000000s |80.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_restore_benchmark [GOOD] Test command err: totalSize# 497747819 period1# 2.165319s period2# 1.229701s MB/s1# 229.8727435 MB/s2# 404.7714192 factor# 1.760849995 |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> IncrementalRestoreScan::ChangeSenderSimple >> TGRpcYdbTest::ExecuteQueryCache [GOOD] >> YdbTableBulkUpsert::ZeroRows [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] >> KqpDataIntegrityTrails::UpsertEvWrite |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> TNodeBrokerTest::TestListNodes [GOOD] >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> IncrementalRestoreScan::ChangeSenderEmpty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableBackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] Test command err: Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2050b 40r} data 2167b + FlatIndex{1} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 0 0 2050b (0, 1) | 0 39 2050b (5, 7) + BTreeIndex{Empty, PageId: 0 RowCount: 40 DataSize: 2050 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{0} Label{04 rev 1, 2050b}, [0, +40)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 
21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b (0, 1) | 3 39 620b (5, 7) + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b (0, 1) | 1 39 2466b (5, 7) + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: (0, 1) {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: (0, 3) {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: (0, 4) {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: (0, 6) {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: (0, 7) {Set 2 Uint32 : 4}, {Set 
3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: (0, 8) {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: (0, 10) {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: (1, 1) {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: (1, 3) {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: (1, 4) {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: (1, 6) {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: (1, 7) {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: (1, 8) {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: (1, 10) {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: (2, 1) {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: (2, 3) {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: (2, 4) {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: (2, 6) {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: (2, 7) {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: (2, 8) {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: (2, 10) {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: (3, 1) {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: (3, 3) {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: (3, 4) {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: (3, 6) {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: (3, 7) {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: (3, 8) {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: (3, 10) {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: (4, 1) {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: (4, 3) {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: (4, 4) {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: (4, 6) {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: (4, 7) {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: (4, 8) {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: (4, 10) {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: (5, 1) {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: (5, 3) {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: (5, 4) {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: (5, 6) {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: (5, 7) {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 8474b 40r} data 6832b + FlatIndex{15} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 7 0 1036b (0, 1) | 7 39 1036b (5, 7) + BTreeIndex{Empty, PageId: 7 RowCount: 40 DataSize: 1036 GroupDataSize: 7438 ErasedRowCount: 0 LevelCount: 0 
IndexSize: 0 + Rows{7} Label{74 rev 1, 1036b}, [0, +40)row | ERowOp 1: (0, 1) | ERowOp 1: (0, 3) | ERowOp 1: (0, 4) | ERowOp 1: (0, 6) | ERowOp 1: (0, 7) | ERowOp 1: (0, 8) | ERowOp 1: (0, 10) | ERowOp 1: (1, 1) | ERowOp 1: (1, 3) | ERowOp 1: (1, 4) | ERowOp 1: (1, 6) | ERowOp 1: (1, 7) | ERowOp 1: (1, 8) | ERowOp 1: (1, 10) | ERowOp 1: (2, 1) | ERowOp 1: (2, 3) | ERowOp 1: (2, 4) | ERowOp 1: (2, 6) | ERowOp 1: (2, 7) | ERowOp 1: (2, 8) | ERowOp 1: (2, 10) | ERowOp 1: (3, 1) | ERowOp 1: (3, 3) | ERowOp 1: (3, 4) | ERowOp 1: (3, 6) | ERowOp 1: (3, 7) | ERowOp 1: (3, 8) | ERowOp 1: (3, 10) | ERowOp 1: (4, 1) | ERowOp 1: (4, 3) | ERowOp 1: (4, 4) | ERowOp 1: (4, 6) | ERowOp 1: (4, 7) | ERowOp 1: (4, 8) | ERowOp 1: (4, 10) | ERowOp 1: (5, 1) | ERowOp 1: (5, 3) | ERowOp 1: (5, 4) | ERowOp 1: (5, 6) | ERowOp 1: (5, 7) Slices{ [0,39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b (0, 1) | 1 2 120b (0, 4) | 2 4 120b (0, 7) | 3 6 120b (0, 10) | 4 8 120b (1, 3) | 5 10 122b (1, 6) | 6 12 122b (1, 8) | 7 14 122b (2, NULL) | 8 16 122b (2, 4) | 9 18 122b (2, 7) | 10 20 122b (2, 10) | 11 22 122b (3, 3) | 12 24 122b (3, 6) | 13 26 122b (3, 8) | 14 28 122b (4, NULL) | 15 30 122b (4, 4) | 16 32 122b (4, 7) | 17 34 122b (4, 10) | 18 36 122b (5, 3) | 19 38 122b (5, 6) | 19 39 122b (5, 7) + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > (0, 4) | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > (0, 7) | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > (0, 10) | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > (1, 3) | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > (1, 6) | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > (1, 8) | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > ... 
urces {1, 0}) 00000.477 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen1 from 12.000000 to 0.000000 (remove task gen1-table-101-tablet-1 (50 by [20:29:2061])) 00000.477 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.477 DD| RESOURCE_BROKER: Submitted new background_compaction_gen2 task gen2-table-101-tablet-1 (57 by [20:29:2061]) priority=400 resources={1, 0} 00000.477 DD| RESOURCE_BROKER: Assigning waiting task gen2-table-101-tablet-1 (57 by [20:29:2061]) to queue queue_background_compaction 00000.477 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.479 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (56 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.479 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (56 by [20:29:2061]) to queue queue_compaction_gen0 00000.479 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (56 by [20:29:2061]) from queue queue_compaction_gen0 00000.479 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (56 by [20:29:2061]) to queue queue_compaction_gen0 00000.479 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.482 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (56 by [20:29:2061]) (release resources {1, 0}) 00000.482 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.483 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (58 by [20:29:2061]) priority=200 resources={1, 0} 00000.483 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (58 by [20:29:2061]) to queue queue_background_compaction 00000.484 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.485 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (58 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.485 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (58 by [20:29:2061]) to queue queue_compaction_gen0 00000.485 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (58 by [20:29:2061]) from queue queue_compaction_gen0 00000.485 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (58 by [20:29:2061]) to queue queue_compaction_gen0 00000.485 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.494 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (58 by [20:29:2061]) (release resources {1, 0}) 00000.494 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.496 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (59 by [20:29:2061]) priority=200 resources={1, 0} 00000.496 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (59 by [20:29:2061]) to queue queue_background_compaction 00000.496 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.498 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (59 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.498 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (59 by [20:29:2061]) to queue queue_compaction_gen0 00000.498 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (59 by [20:29:2061]) from queue 
queue_compaction_gen0 00000.498 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (59 by [20:29:2061]) to queue queue_compaction_gen0 00000.498 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.501 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (59 by [20:29:2061]) (release resources {1, 0}) 00000.501 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.503 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (60 by [20:29:2061]) priority=200 resources={1, 0} 00000.503 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (60 by [20:29:2061]) to queue queue_background_compaction 00000.503 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.505 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (60 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.505 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (60 by [20:29:2061]) to queue queue_compaction_gen0 00000.505 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (60 by [20:29:2061]) from queue queue_compaction_gen0 00000.505 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (60 by [20:29:2061]) to queue queue_compaction_gen0 00000.505 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.509 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (60 by [20:29:2061]) (release resources {1, 0}) 00000.509 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.511 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (61 by [20:29:2061]) priority=200 resources={1, 0} 00000.511 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (61 by [20:29:2061]) to queue queue_background_compaction 00000.511 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.513 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (61 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.513 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (61 by [20:29:2061]) to queue queue_compaction_gen0 00000.513 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (61 by [20:29:2061]) from queue queue_compaction_gen0 00000.513 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (61 by [20:29:2061]) to queue queue_compaction_gen0 00000.513 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.516 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (61 by [20:29:2061]) (release resources {1, 0}) 00000.516 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.516 DD| RESOURCE_BROKER: Submitted new background_compaction_gen1 task gen1-table-101-tablet-1 (62 by [20:29:2061]) priority=200 resources={1, 0} 00000.516 DD| RESOURCE_BROKER: Assigning waiting task gen1-table-101-tablet-1 (62 by [20:29:2061]) to queue queue_background_compaction 00000.516 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.575 DD| RESOURCE_BROKER: Update task gen2-table-101-tablet-1 (57 by [20:29:2061]) (priority=97 type=background_compaction_gen2 resources={1, 0} resubmit=0) 00000.575 DD| RESOURCE_BROKER: Assigning waiting task gen2-table-101-tablet-1 (57 by 
[20:29:2061]) to queue queue_background_compaction 00000.575 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (in-fly consumption {1, 0}) 00000.575 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.576 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987988 by [20:7:2054]) priority=150 resources={1, 0} 00000.576 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987988 by [20:7:2054]) to queue queue_background_compaction 00000.576 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.576 DD| RESOURCE_BROKER: Finish task bckg-block (987987987987 by [20:7:2054]) (release resources {1, 0}) 00000.576 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen2-table-101-tablet-1 (57 by [20:29:2061]) from queue queue_background_compaction 00000.576 DD| RESOURCE_BROKER: Assigning in-fly task gen2-table-101-tablet-1 (57 by [20:29:2061]) to queue queue_background_compaction 00000.576 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task gen2-table-101-tablet-1 (57 by [20:29:2061])) 00000.576 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.596 DD| RESOURCE_BROKER: Finish task gen2-table-101-tablet-1 (57 by [20:29:2061]) (release resources {1, 0}) 00000.596 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 600.000000 to 300.000000 (remove task gen2-table-101-tablet-1 (57 by [20:29:2061])) 00000.597 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987988 by [20:7:2054]) from queue queue_background_compaction 00000.597 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987988 by [20:7:2054]) to queue queue_background_compaction 00000.597 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task bckg-block (987987987988 by [20:7:2054])) 00000.597 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.604 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (63 by [20:29:2061]) priority=200 resources={1, 0} 00000.604 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [20:29:2061]) to queue queue_background_compaction 00000.604 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.606 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (63 by [20:29:2061]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.606 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [20:29:2061]) to queue queue_compaction_gen0 00000.606 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_compaction_gen0 from 0.000000 to 300.000000 00000.606 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (63 by [20:29:2061]) from queue queue_compaction_gen0 00000.606 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (63 by [20:29:2061]) to queue queue_compaction_gen0 00000.606 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.611 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (63 by [20:29:2061]) (release resources {1, 0}) 00000.611 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded 
limits 00000.659 DD| RESOURCE_BROKER: Update task gen1-table-101-tablet-1 (62 by [20:29:2061]) (priority=166 type=background_compaction_gen1 resources={1, 0} resubmit=0) 00000.659 DD| RESOURCE_BROKER: Assigning waiting task gen1-table-101-tablet-1 (62 by [20:29:2061]) to queue queue_background_compaction 00000.659 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (in-fly consumption {1, 0}) 00000.659 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.662 DD| RESOURCE_BROKER: Removing task gen1-table-101-tablet-1 (62 by [20:29:2061]) 00000.662 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.667 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 0 reqs hit {0 0b} miss {0 0b} 00000.667 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.667 II| FAKE_ENV: DS.0 gone, left {9702b, 90}, put {69314b, 689} 00000.668 II| FAKE_ENV: DS.1 gone, left {49679b, 125}, put {120827b, 750} 00000.668 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.668 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.668 II| FAKE_ENV: All BS storage groups are stopped 00000.668 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 2.000m 00000.668 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 652}, stopped >> IncrementalRestoreScan::Empty |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2024-11-21T23:08:08.196687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:08.196758Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_2_1_clean ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2024-11-21T23:07:34.767077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873149326470517:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:34.773641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd3/r3tmp/tmpHXYqCk/pdisk_1.dat 2024-11-21T23:07:35.368373Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:35.391004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:35.391123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:35.396768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27340, node 1 2024-11-21T23:07:35.709975Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:35.710004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:35.710014Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:35.716835Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:36.100957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:36.108535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:36.108592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:36.111364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:36.111604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:36.111628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
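The FLAT_TX_SCHEMESHARD publication traces in this run (for example txId 281474976710659 near the start of this section) follow a fixed pattern: each published path is acknowledged by one TEvUpdateAck, the logged "Publication in-flight" count is the number of paths still outstanding when that ack arrives (5, 4, 3, 2, 1), and "Publication complete, notify & remove" is reported once nothing remains. Below is a minimal sketch of that countdown pattern; the type and method names are hypothetical and this is not the actual schemeshard code, only an illustration of the behaviour visible in the trace.

```cpp
#include <cstdint>
#include <iostream>
#include <set>
#include <utility>

// Illustrative reconstruction of the publication/ack countdown visible in the
// FLAT_TX_SCHEMESHARD traces in this run. Names are hypothetical; this is not
// the real schemeshard implementation.
struct TPublicationTracker {
    uint64_t TxId;
    std::set<std::pair<uint64_t, uint64_t>> InFlight; // (PathOwnerId, LocalPathId)

    void Publish(uint64_t ownerId, uint64_t localPathId) {
        InFlight.emplace(ownerId, localPathId);
    }

    // Handle one TEvUpdateAck; returns true when nothing is left in flight.
    bool AckPublish(uint64_t ownerId, uint64_t localPathId, uint64_t version) {
        // The trace logs the in-flight count before the acked path is removed
        // (5, 4, 3, 2, 1 for txId 281474976710659 above).
        std::cout << "Publication in-flight, count: " << InFlight.size()
                  << ", txId: " << TxId << "\n";
        std::cout << "AckPublish, txId: " << TxId
                  << ", pathId: [OwnerId: " << ownerId
                  << ", LocalPathId: " << localPathId
                  << "], version: " << version << "\n";
        InFlight.erase({ownerId, localPathId});
        return InFlight.empty();
    }
};

int main() {
    const uint64_t owner = 72057594046644480ULL;
    // txId 281474976710659 above publishes five paths, each acked exactly once;
    // the (LocalPathId, version) pairs are taken verbatim from the trace.
    const std::pair<uint64_t, uint64_t> acks[] = {{1, 7}, {3, 5}, {4, 5}, {5, 5}, {6, 2}};

    TPublicationTracker tracker{281474976710659ULL, {}};
    for (const auto& ack : acks)
        tracker.Publish(owner, ack.first);
    for (const auto& ack : acks)
        if (tracker.AckPublish(owner, ack.first, ack.second))
            std::cout << "Publication complete, notify & remove, txId: "
                      << tracker.TxId << "\n";
    return 0;
}
```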
2024-11-21T23:07:36.115244Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:36.115276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:36.115598Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:36.121921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:36.139625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230456180, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:36.139681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:36.140004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:36.148948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:36.149157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:36.149225Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:36.149315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:36.149360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:36.149428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:36.152878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:36.154433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:36.154460Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:36.154561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:40.202876Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873178968304516:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:40.210406Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd3/r3tmp/tmpZ7JLuh/pdisk_1.dat 2024-11-21T23:07:40.538932Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:40.634700Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:40.634800Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:40.638072Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24719, node 4 2024-11-21T23:07:41.067020Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:41.067041Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:41.067049Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:41.067167Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:41.510979Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.511618Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:41.511641Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.524656Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:41.524825Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:41.524843Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2024-11-21T23:07:41.535676Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:41.535706Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:41.537395Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:41.539339Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.546590Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230461591, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:41.546630Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:41.546871Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:41.548842Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:41.549006Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:41.549056Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:41.549132Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:07:41.549164Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:07:41.549202Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:07:41.550851Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940 ... 
known, add access: +(SR|DS):root@builtin 2024-11-21T23:08:09.460531Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:09.461128Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:09.461203Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, ProgressState 2024-11-21T23:08:09.461434Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:09.461509Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:09.461583Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:09.464388Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.464458Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.464483Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:08:09.464868Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.464893Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.464906Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:08:09.465058Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.465077Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.465087Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 3 2024-11-21T23:08:09.465227Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.465244Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.465256Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2024-11-21T23:08:09.465409Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.465429Z node 
13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.465442Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 1 2024-11-21T23:08:09.475203Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:08:09.481605Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230489521, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:09.481659Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230489521, at schemeshard: 72057594046644480 2024-11-21T23:08:09.481782Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:08:09.481904Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230489521, at schemeshard: 72057594046644480 2024-11-21T23:08:09.481950Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 128 -> 240 2024-11-21T23:08:09.482014Z node 13 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230489521, at schemeshard: 72057594046644480 2024-11-21T23:08:09.482429Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:2 128 -> 240 2024-11-21T23:08:09.482512Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710658:3, HandleReply TEvOperationPlan: step# 1732230489521 2024-11-21T23:08:09.482558Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:3 128 -> 240 2024-11-21T23:08:09.484957Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:09.485551Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:09.485639Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:3 ProgressState 2024-11-21T23:08:09.485753Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:3 progress is 1/4 2024-11-21T23:08:09.485992Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:1 ProgressState 2024-11-21T23:08:09.486048Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:1 progress is 2/4 2024-11-21T23:08:09.486157Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:2 ProgressState 2024-11-21T23:08:09.486194Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:2 progress is 3/4 2024-11-21T23:08:09.486312Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:08:09.486358Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 4/4 2024-11-21T23:08:09.491058Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:08:09.491139Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:1 2024-11-21T23:08:09.491158Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 281474976710658:2 2024-11-21T23:08:09.491173Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:3 2024-11-21T23:08:09.491195Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 5, subscribers: 1 2024-11-21T23:08:09.493001Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.493053Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.493076Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:08:09.493353Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.493372Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.493386Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 5 2024-11-21T23:08:09.493510Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.493529Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.493537Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:08:09.493651Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.493668Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.493677Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:08:09.493779Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.493799Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.493809Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T23:08:09.493849Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:08:09.500906Z node 13 :KQP_WORKLOAD_SERVICE WARN: 
[WorkloadService] [TPoolCreatorActor] ActorId: [13:7439873302900918640:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:08:09.585571Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:08:09.585767Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:08:09.590383Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2024-11-21T23:07:40.344529Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873175475290981:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:40.344671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dbe/r3tmp/tmpIaSKY5/pdisk_1.dat 2024-11-21T23:07:41.222665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:41.222769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:41.238041Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:41.240464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13989, node 1 2024-11-21T23:07:41.679241Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:41.679269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:41.679278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:41.679384Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19554 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:42.241016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:42.248294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:42.248389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:42.251264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:42.251501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:42.251527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:42.255039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:42.255235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:42.255255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:42.263286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:42.274357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230462319, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:42.274429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:42.274738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:42.276843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:42.277043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:42.277098Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:42.277189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:42.277225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:42.277285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:42.282302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:42.282365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:42.282380Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:42.282489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:45.007460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873192655161089:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:45.007566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:45.339839Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873175475290981:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:45.361183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:45.362731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:45.363221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:45.364158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:45.364188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:45.368877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:07:45.369132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:45.369344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:45.369550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:45.377878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:45.377927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:45.377946Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:45.378248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:45.378263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:45.378276Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:45.381161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:45.395772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:45.395875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:45.402206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 
2024-11-21T23:07:45.468930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:07:45.468965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:07:45.469019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:45.470240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:07:45.476954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230465518, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:45.477018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230465518 2024-11-21T23:07:45.477113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:07:45.482777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:45.483079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:45.483133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:07:45.485304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Ow ... shed: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:08:05.136531Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:05.136926Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:05.136950Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:05.139730Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:08:05.139939Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:08:05.139956Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:08:05.146114Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:05.146146Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:08:05.147260Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:08:05.148964Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:05.158457Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230485202, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:05.158511Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:08:05.158797Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:08:05.161445Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:05.161645Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:05.161705Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:08:05.161809Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:08:05.161847Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:08:05.161897Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:08:05.162958Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:08:05.163005Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:08:05.163021Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:08:05.163110Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:08:09.025053Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439873279230286014:2183];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:09.025138Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:09.739135Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:09.739671Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:08:09.740253Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:09.740279Z 
node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:09.751918Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:08:09.752194Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:09.752472Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:09.752552Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:08:09.754806Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.754878Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.754904Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:08:09.755211Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.755239Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.755255Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:08:09.755497Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:08:09.770602Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:08:09.770735Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:08:09.792325Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:08:09.852643Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:08:09.852677Z node 10 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:08:09.852777Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:08:09.859484Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:08:09.864846Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230489906, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:09.864895Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, 
stepId: 1732230489906 2024-11-21T23:08:09.864994Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:08:09.871651Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:09.871948Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:09.872015Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:08:09.873372Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.873427Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.873444Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:08:09.873745Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:09.873797Z node 10 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:09.873822Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:08:09.875715Z node 10 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037888 Status: COMPLETE TxId: 281474976710658 Step: 1732230489906 OrderId: 281474976710658 ExecLatency: 0 ProposeLatency: 9 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1508 } } 2024-11-21T23:08:09.877654Z node 10 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:08:09.877690Z node 10 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:09.877717Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 129 -> 240 2024-11-21T23:08:09.885652Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:08:09.885773Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:08:09.885832Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> TSchemeShardMoveTest::Chain >> TSchemeShardMoveTest::MoveIndexSameDst |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |80.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> TSchemeShardMoveTest::Reject ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2024-11-21T23:05:53.003374Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:05:53.224990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:05:53.309684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:05:53.309767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:05:53.310003Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:05:53.341774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:05:53.341971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:53.342195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:53.342293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:53.342402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:53.342503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:53.342597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:53.342708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:53.342839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:53.342944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.343028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:53.343103Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:53.401098Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:05:53.401269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:05:53.410928Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:05:53.411132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:05:53.411173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:05:53.411325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:53.411539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:05:53.411610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:05:53.411648Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:05:53.411722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:05:53.411774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:05:53.411808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:05:53.411834Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:05:53.412019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:05:53.412079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:05:53.412117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:05:53.412146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:05:53.412229Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:05:53.412278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:05:53.412316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:05:53.412340Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:05:53.412399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:05:53.412434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:05:53.412459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:05:53.412503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:05:53.412538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:05:53.412581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:05:53.412767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2024-11-21T23:05:53.412846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2024-11-21T23:05:53.412948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2024-11-21T23:05:53.413036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2024-11-21T23:05:53.413187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:05:53.413251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:05:53.413297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:05:53.413497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T23:05:53.413555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.413593Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:05:53.413738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:05:53.413783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:05:53.413813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:05:53.414011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:05:53.414067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:05:53.414098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... shLoadingTime=20921; 2024-11-21T23:08:10.080305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=40833; 2024-11-21T23:08:10.080676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=287; 2024-11-21T23:08:10.082303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=238; 2024-11-21T23:08:10.082386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1650; 2024-11-21T23:08:10.085455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=176; 2024-11-21T23:08:10.085633Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:08:10.085703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=187; 2024-11-21T23:08:10.085842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=78; 2024-11-21T23:08:10.085966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=66; 
2024-11-21T23:08:10.086693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=667; 2024-11-21T23:08:10.087897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1137; 2024-11-21T23:08:10.088042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=83; 2024-11-21T23:08:10.088175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=86; 2024-11-21T23:08:10.088230Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2024-11-21T23:08:10.088283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2024-11-21T23:08:10.088334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2024-11-21T23:08:10.088453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=73; 2024-11-21T23:08:10.088518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=18; 2024-11-21T23:08:10.088633Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=70; 2024-11-21T23:08:10.088684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2024-11-21T23:08:10.088761Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2024-11-21T23:08:10.088820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=58549; 2024-11-21T23:08:10.089046Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40196124;raw_bytes=67627660; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64332088;raw_bytes=108739216; inactive portions=44;blobs=88;rows=1246652;bytes=70696640;raw_bytes=119259832; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:08:10.089186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1670:3646];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:08:10.089295Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:08:10.089401Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:1670:3646];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T23:08:10.089638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1670:3646];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:08:10.089691Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:08:10.089794Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:08:10.089852Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:08:10.089933Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:08:10.090220Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=21; 2024-11-21T23:08:10.090303Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T23:08:10.090357Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:08:10.090599Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:10.090667Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:10.091021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:08:10.091148Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:08:10.092052Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1703:3672];tablet_id=9437184;parent=[1:1670:3646];fline=manager.h:99;event=ask_data;request=request_id=281;1={portions_count=91};; 2024-11-21T23:08:10.092869Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:08:10.093843Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1703:3672];tablet_id=9437184;parent=[1:1670:3646];fline=manager.h:99;event=ask_data;request=request_id=283;1={portions_count=47};; 2024-11-21T23:08:10.106019Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:08:10.106791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:08:10.106848Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T23:08:10.106894Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:08:10.106977Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:08:10.107081Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:08:10.107395Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=21; 2024-11-21T23:08:10.107481Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T23:08:10.107538Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:08:10.107616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:10.107673Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:10.107748Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:08:10.107863Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:08:10.108761Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2024-11-21T23:08:10.115440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2024-11-21T23:08:10.125073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:08:10.125206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1670:3646];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> 
PersQueueSdkReadSessionTest::SettingsValidation >> TSchemeShardMoveTest::TwoTables >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace >> TSchemeShardMoveTest::Boot >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> YdbYqlClient::RetryOperationSync [GOOD] >> YdbYqlClient::RetryOperationLimitedDuration |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |80.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> TableCreator::CreateTables [GOOD] >> TSchemeShardMoveTest::TwoTables [GOOD] >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> IncrementalRestoreScan::Empty [GOOD] >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:15.806284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:15.806375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:15.806516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:15.806575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:15.806630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:15.806659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:15.806738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:15.807056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:15.896185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:15.896261Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:15.921742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:15.925030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:15.925180Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:15.931395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:15.931979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:15.932585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:15.932863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:15.937044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:15.938460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:15.938526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:15.938747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:15.938805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:15.938845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:15.938942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.949132Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:16.121409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:16.121711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:16.122000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:16.122276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:16.122340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:16.128343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:16.128530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:16.128745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:16.128816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T23:08:16.128883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:16.128922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:16.131346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:16.131402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:16.131440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:16.133857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:16.133922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:16.134010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:16.134074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:16.138161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:16.141637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:16.141856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:16.142929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:16.143104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:16.143167Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:16.143437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:16.143485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:16.143645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:16.143767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:16.147483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:16.147533Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:16.147806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:16.147853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:16.148216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:16.148268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:16.148388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:16.148450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:16.148497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:16.148586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:16.148623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:16.148656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:16.148728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:16.148838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:16.148879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:16.157575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:16.157755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:16.157794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:16.157859Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:16.157901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:16.158010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
or pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:08:17.013504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:08:17.013568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T23:08:17.013591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T23:08:17.013638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T23:08:17.013680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:08:17.014045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:08:17.014103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:08:17.014172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T23:08:17.014211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:08:17.014247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:08:17.035686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:08:17.035769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:08:17.035806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:492:2456] TestWaitNotification: OK eventTxId 103 2024-11-21T23:08:17.036522Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:17.036735Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 262us result status StatusPathDoesNotExist 2024-11-21T23:08:17.036930Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:08:17.037442Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:17.037664Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 228us result status StatusSuccess 2024-11-21T23:08:17.038022Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:17.038756Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:17.038980Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 207us result status StatusPathDoesNotExist 2024-11-21T23:08:17.039121Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" 
ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:08:17.039579Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:17.039776Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 189us result status StatusSuccess 2024-11-21T23:08:17.040187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:17.045644Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:17.047352Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 6.36ms result status StatusSuccess 2024-11-21T23:08:17.047865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2024-11-21T23:08:10.362985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873305246466550:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001b0d/r3tmp/tmp5Rws7d/pdisk_1.dat 2024-11-21T23:08:10.866674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:08:11.137364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:11.137839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:11.228291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:11.235386Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22011 TServer::EnableGrpc on GrpcPort 30791, node 1 2024-11-21T23:08:11.802590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:08:11.802614Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:08:11.802621Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:08:11.802723Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2024-11-21T23:08:12.128611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:08:12.155322Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:08:12.161992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T23:08:12.166809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2024-11-21T23:08:15.210727Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873305246466550:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:15.210827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> TSchemeShardMoveTest::Index [GOOD] |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |80.3%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2024-11-21T23:08:16.665601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:16.672898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:16.673080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001288/r3tmp/tmpP310vU/pdisk_1.dat 2024-11-21T23:08:17.331998Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Exhausted 2024-11-21T23:08:17.332144Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2024-11-21T23:08:17.332195Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][[OwnerId: 1, LocalPathId: 2]][[OwnerId: 3, LocalPathId: 4]][1:562:2488] Finish 0 |80.3%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> TBackupTests::BackupUuidColumn[Zstd] >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestRestartsWithFollower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:14.830216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:14.830303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:14.830354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:14.830832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:14.830891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:14.830919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:14.831006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:14.831340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:14.943740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console 
configs 2024-11-21T23:08:14.943810Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:14.958182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:14.962709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:14.962973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:14.970647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:14.971415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:14.972135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:14.972523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:14.977432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:14.978963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:14.979027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:14.979259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:14.979317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:14.979361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:14.979471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:14.987039Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:15.156221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:15.156552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.156810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:15.157098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:15.157160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.163578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:15.163762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T23:08:15.163987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.164077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:15.164146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:15.164195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:15.171579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.171683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:15.171748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:15.174525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.174607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.174675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.174744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.179381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:15.188415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:15.188720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:15.189891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:15.190081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:15.190150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.190580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:15.190643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.190869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:15.190982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:15.196824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:15.196907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:15.197207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:15.197261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:15.198648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.198717Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:15.198842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:15.198894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.198954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:15.199014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.199059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:15.199099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:15.199182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:15.199297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:15.199342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:15.201539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:15.201689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:15.201745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:15.201791Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:15.201832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:15.201938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
59Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:17.884266Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:437:2401], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:17.884635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T23:08:17.884733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:08:17.884867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T23:08:17.884895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T23:08:17.884925Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T23:08:17.885146Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:17.885246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:17.885294Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T23:08:17.885331Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T23:08:17.887207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.887271Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T23:08:17.887381Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T23:08:17.887418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710760 ready parts: 1/1 2024-11-21T23:08:17.887462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T23:08:17.887543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:122:2148] message: TxId: 281474976710760 2024-11-21T23:08:17.887604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T23:08:17.887641Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T23:08:17.887672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T23:08:17.887749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T23:08:17.890694Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T23:08:17.890798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T23:08:17.890866Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T23:08:17.890960Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:437:2401], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:17.893269Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:17.893386Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:437:2401], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read 
bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:17.893452Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T23:08:17.896840Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:17.896968Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:437:2401], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:17.897022Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T23:08:17.897189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:17.897243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:613:2566] TestWaitNotification: OK eventTxId 102 2024-11-21T23:08:17.897921Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:17.898230Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 363us result status StatusSuccess 2024-11-21T23:08:17.898756Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" 
LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2024-11-21T23:08:15.676245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:15.685183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:15.685393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012a7/r3tmp/tmpVqTk6Z/pdisk_1.dat 2024-11-21T23:08:16.475405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T23:08:16.475674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:16.475940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T23:08:16.476203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:16.476283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:16.477115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:16.477276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:08:16.477511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:16.477608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:08:16.477666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:16.477710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:16.478494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:16.478549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:16.478589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:16.479086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:16.479126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:16.479170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T23:08:16.479253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T23:08:16.483309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:16.483872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:16.484075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:08:16.485204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:16.485254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T23:08:16.485292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:16.534898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T23:08:16.534974Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:16.589160Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:16.590189Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:08:16.590412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:16.590559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:16.602061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:16.728570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:16.728757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T23:08:16.728818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T23:08:16.729079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:16.729137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T23:08:16.729328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:08:16.729425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:08:16.731053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:16.731107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 
72057594046644480, LocalPathId: 1] 2024-11-21T23:08:16.731273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:16.731315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2024-11-21T23:08:16.731636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:16.731697Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T23:08:16.731834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:16.731880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:16.731934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:16.731981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:16.732019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:16.732053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:16.732134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T23:08:16.732172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T23:08:16.732215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T23:08:16.742824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T23:08:16.743031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T23:08:16.743081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T23:08:16.743129Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:08:16.743182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:08:16.743335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T23:08:16.743390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T23:08:16.744664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T23:08:16.745088Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:08:16.745144Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:08:16.745315Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# 
[1:607:2516] 2024-11-21T23:08:16.768945Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:08:16.769685Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:08:16.769815Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:08:16.770131Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:08:16.770304Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResoluti ... 024-11-21T23:08:17.595674Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037889 state Ready 2024-11-21T23:08:17.595752Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T23:08:17.595954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:17.596010Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715658:0 ProgressState 2024-11-21T23:08:17.596165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2024-11-21T23:08:17.596215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2024-11-21T23:08:17.596350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2024-11-21T23:08:17.596476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:558:2485] message: TxId: 281474976715658 2024-11-21T23:08:17.596545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2024-11-21T23:08:17.596609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2024-11-21T23:08:17.596650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2024-11-21T23:08:17.596786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2024-11-21T23:08:17.597489Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvNavigate describe path /Root/IncrBackupTable 2024-11-21T23:08:17.597613Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] HANDLE EvNavigateScheme /Root/IncrBackupTable 2024-11-21T23:08:17.599513Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T23:08:17.599664Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2024-11-21T23:08:17.600787Z node 1 :TX_PROXY DEBUG: Actor# [1:779:2640] Handle 
TEvDescribeSchemeResult Forward to# [1:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T23:08:17.601914Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:789:2644], serverId# [1:790:2645], sessionId# [0:0:0] 2024-11-21T23:08:17.603355Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:08:17.603647Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:08:17.603934Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T23:08:17.604156Z node 1 
:CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2024-11-21T23:08:17.604296Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:08:17.604524Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvGetProxyServicesRequest 2024-11-21T23:08:17.604602Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T23:08:17.604951Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:796:2650], serverId# [1:797:2651], sessionId# [0:0:0] 2024-11-21T23:08:17.657238Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:08:17.657398Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:08:17.657547Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:795:2646] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:08:17.657614Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2024-11-21T23:08:17.657815Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:791:2646] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2024-11-21T23:08:16.600593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:16.614934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:16.615247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00129c/r3tmp/tmpfoHXa0/pdisk_1.dat 2024-11-21T23:08:17.402044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2024-11-21T23:08:17.402299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:17.402539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T23:08:17.402790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:17.402838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:17.403518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:17.403653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:08:17.403855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:17.403934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:08:17.403980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:17.404015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:17.404613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:17.404655Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:17.404686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:17.405131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:17.405165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:17.405205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046644480 2024-11-21T23:08:17.405256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T23:08:17.414401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:17.415125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:17.415292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:08:17.416366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:17.416425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2024-11-21T23:08:17.416485Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:17.462854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2024-11-21T23:08:17.462918Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:17.527143Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:17.528017Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:08:17.528239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:17.528350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:17.543108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:17.665771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:17.666096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T23:08:17.666191Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T23:08:17.667134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:17.667255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046644480 2024-11-21T23:08:17.667602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:08:17.667772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:08:17.670147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:17.670253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 
72057594046644480, LocalPathId: 1] 2024-11-21T23:08:17.672486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:17.672564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:541:2469], at schemeshard: 72057594046644480, txId: 1, path id: 1 2024-11-21T23:08:17.673190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:17.673293Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2024-11-21T23:08:17.673496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:17.673573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:17.675687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:17.675793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:17.675874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:17.675943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:17.676144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T23:08:17.676240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T23:08:17.676330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T23:08:17.679123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T23:08:17.679243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2024-11-21T23:08:17.679294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2024-11-21T23:08:17.679339Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:08:17.679379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:08:17.679514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T23:08:17.679563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:558:2485] 2024-11-21T23:08:17.680543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2024-11-21T23:08:17.680933Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:08:17.680985Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:08:17.681118Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# 
[1:607:2516] 2024-11-21T23:08:17.706458Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:08:17.707108Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:08:17.707217Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:08:17.707524Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:08:17.707680Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResoluti ... .TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2024-11-21T23:08:18.619703Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvNavigate describe path /Root/IncrBackupTable 2024-11-21T23:08:18.619804Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] HANDLE EvNavigateScheme /Root/IncrBackupTable 2024-11-21T23:08:18.620169Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T23:08:18.620249Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2024-11-21T23:08:18.621280Z node 1 :TX_PROXY DEBUG: Actor# [1:791:2646] Handle TEvDescribeSchemeResult Forward to# [1:558:2485] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false 
IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T23:08:18.621976Z node 1 :CHANGE_EXCHANGE 
DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:08:18.622222Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:08:18.637725Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T23:08:18.638151Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:793:2648] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> TBackupTests::ShouldSucceedOnLargeData[Raw] |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:14.824071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:14.824186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:14.824244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:14.824281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:14.824324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:14.824354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:14.824409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:14.824793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:14.925039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:14.925114Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:14.938047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:14.942074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:14.942293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:14.949774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:14.950451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:14.951075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:14.951431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:14.956657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:14.958069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:14.958138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:14.958354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:14.958431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:14.958479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:14.958601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:14.974343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:15.129771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:15.130047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T23:08:15.130270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:15.130635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:15.130704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.135676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:15.135972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:15.136193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.136268Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:15.136333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:15.136412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:15.141061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.141130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:15.141174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:15.151628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.151711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.151771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.151840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.155892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:15.158931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:15.159162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:15.160205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:15.160372Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:15.160447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.160742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:15.160793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.160968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:15.161091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:15.163543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:15.163593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:15.163844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:15.163907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:15.164313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.164362Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:15.164496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:15.164530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.164592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:15.164654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.164694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:15.164723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:15.164798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:15.164887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:15.164924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:15.167955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:15.168099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046678944, cookie: 1 2024-11-21T23:08:15.168145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:15.168192Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:15.168261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:15.168379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:18.480607Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:18.480757Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 179us result status StatusSuccess 2024-11-21T23:08:18.481080Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:18.481568Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:08:18.481783Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 239us result status StatusSuccess 2024-11-21T23:08:18.482942Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 
72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:18.483602Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:08:18.483830Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 251us result status StatusSuccess 2024-11-21T23:08:18.484466Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.3%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:16.952415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:16.952532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:16.952591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:16.952627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:16.952669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:16.952708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:16.952766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:16.953107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:17.110287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:17.110345Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:17.137233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:17.140998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:17.141176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:17.156288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:17.157139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:17.157744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:17.158028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:17.163193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:17.164419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:17.164502Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:17.164673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:17.164722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:17.164755Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:17.164851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.171602Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:17.375157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:17.375402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.375658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:17.375904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:17.375956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.378697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:17.378844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:17.379036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.379106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:17.379150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:17.379191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:17.381693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.381769Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:17.381824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:17.383753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.383815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.383869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:17.383937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:17.387975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:17.390078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:17.390259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:17.391646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:17.391871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:17.391944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:17.392220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:17.392282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:17.392495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:17.392605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:17.395045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:17.395090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:17.395330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:17.395370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:17.395739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:17.395784Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:17.395869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:17.395912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:17.395968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:17.396015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:17.396049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:17.396077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2024-11-21T23:08:17.396141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:17.396229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:17.396260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:17.397921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:17.398038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:17.398078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:17.398116Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:17.398152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:17.398253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 863 } } 2024-11-21T23:08:19.094622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:19.094740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 863 } } 2024-11-21T23:08:19.094813Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 863 } } FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T23:08:19.096216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T23:08:19.096260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2024-11-21T23:08:19.096374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T23:08:19.096414Z node 
2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:19.096529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T23:08:19.096594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:19.096634Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T23:08:19.096676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:19.096713Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240 2024-11-21T23:08:19.097574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 8589936896 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T23:08:19.097612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:19.097690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 8589936896 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T23:08:19.097717Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:19.097775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 320 RawX2: 8589936896 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2024-11-21T23:08:19.097811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:19.097837Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:19.097878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:19.097923Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2024-11-21T23:08:19.108286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T23:08:19.111447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:19.111814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T23:08:19.112507Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2024-11-21T23:08:19.112560Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:19.112607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T23:08:19.112720Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2024-11-21T23:08:19.112755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2024-11-21T23:08:19.112795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2024-11-21T23:08:19.117037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:19.117294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:19.117332Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:19.117364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T23:08:19.117436Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2024-11-21T23:08:19.117461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2024-11-21T23:08:19.117495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2024-11-21T23:08:19.117550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2024-11-21T23:08:19.117598Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:08:19.117628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:08:19.117753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T23:08:19.117793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:08:19.117829Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T23:08:19.117849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T23:08:19.117873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T23:08:19.117892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:08:19.117909Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2024-11-21T23:08:19.117926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2024-11-21T23:08:19.117960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T23:08:19.117990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T23:08:19.118642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:08:19.118689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:08:19.118758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:08:19.118799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:08:19.118828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:08:19.118854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:08:19.118879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:19.134953Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:08:19.135476Z node 2 :TX_PROXY DEBUG: actor# [2:266:2258] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2024-11-21T23:08:19.209125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:08:19.209185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:08:19.209632Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:08:19.209717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:08:19.209759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:658:2546] TestWaitNotification: OK eventTxId 103 >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict [GOOD] >> TSchemeShardMoveTest::OneTable [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:14.889082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:14.889187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:14.889253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:14.889293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:14.889341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:14.889368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:14.889426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:14.889770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:15.072633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:15.072700Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:15.101230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:15.110934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:15.111145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:15.132091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:15.138689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:15.139329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:15.139718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:15.150863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:15.152245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:15.152312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:15.152535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:15.152590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:15.152630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:15.152730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.170626Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:15.375066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:15.375330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.375549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:15.375754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:15.375806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.391493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:15.391641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:15.391851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.391919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:15.391980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:15.392025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:15.395452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.395521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:15.395559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:15.403424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.403501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.403565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.403635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.408838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:15.419330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:15.419547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: 
advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:15.420646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:15.420799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:15.420846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.421137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:15.421189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:15.421349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:15.421433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:15.428006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:15.428489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:15.428719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:15.428761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:15.429140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:15.429185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:15.429279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:15.429307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.429355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:15.429405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:15.429438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:15.429481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:15.429549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:15.429635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:15.429665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:15.435196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:15.435347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:15.435388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:15.435424Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:15.435463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:15.435586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.445700Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 ProgressState, operation type: TxDropTable, at tablet72057594046678944 2024-11-21T23:08:20.445762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T23:08:20.445810Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2024-11-21T23:08:20.445931Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T23:08:20.446093Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2024-11-21T23:08:20.446252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:20.446326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:08:20.452249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.454159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.454586Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:20.454670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:20.454894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:08:20.455079Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:20.455141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 108, path id: 1 2024-11-21T23:08:20.455213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 108, 
path id: 4 2024-11-21T23:08:20.455717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.455785Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:08:20.455914Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.455979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:20.456044Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2024-11-21T23:08:20.457238Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T23:08:20.457410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T23:08:20.457467Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T23:08:20.457517Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2024-11-21T23:08:20.457569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:20.458953Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T23:08:20.459057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2024-11-21T23:08:20.459110Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2024-11-21T23:08:20.459153Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T23:08:20.459192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:08:20.459313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2024-11-21T23:08:20.463498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.463569Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:20.463898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:08:20.464041Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2024-11-21T23:08:20.464102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T23:08:20.464172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2024-11-21T23:08:20.464281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:339:2314] message: TxId: 108 2024-11-21T23:08:20.464342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2024-11-21T23:08:20.464411Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2024-11-21T23:08:20.464476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2024-11-21T23:08:20.464590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:08:20.465112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2024-11-21T23:08:20.467450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2024-11-21T23:08:20.471930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2024-11-21T23:08:20.472007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:817:2778] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T23:08:20.473016Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T23:08:20.473117Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2024-11-21T23:08:20.508306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 8589936886 } TabletId: 72075186233409546 State: 4 2024-11-21T23:08:20.508467Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T23:08:20.511008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:08:20.511615Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T23:08:20.511865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:20.512210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2024-11-21T23:08:20.514987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:08:20.515053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:08:20.515158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:20.519107Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T23:08:20.519203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T23:08:20.520577Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2024-11-21T23:08:20.521626Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:20.521860Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 282us result status StatusSuccess 2024-11-21T23:08:20.522285Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlap::PredicatePushdown_MixStrictAndNotStrict [GOOD] Test command err: Trying to start YDB, gRPC: 65041, MsgBus: 5959 2024-11-21T23:08:13.466564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873319309902885:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:13.466727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00126a/r3tmp/tmpeZ4jnx/pdisk_1.dat 2024-11-21T23:08:14.132950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:14.133038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:14.134831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:14.161157Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65041, node 1 2024-11-21T23:08:14.414082Z node 1 :NET_CLASSIFIER WARN: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:08:14.414109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:08:14.414117Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:08:14.414219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5959 TClient is connected to server localhost:5959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:08:15.006010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:08:15.037406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:08:15.052942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T23:08:15.229773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:08:15.230037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:08:15.230310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:08:15.231116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:08:15.231274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:08:15.231362Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:08:15.238215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:08:15.238414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:08:15.238534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:08:15.238662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:08:15.238758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:08:15.238851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327899837999:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:08:15.287705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:08:15.287787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:08:15.288025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:08:15.288144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:08:15.288273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:08:15.288377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:08:15.288481Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:08:15.288585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:08:15.288706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:08:15.288836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:08:15.288970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:08:15.289071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327899837998:2289];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:08:15.366867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327899838025:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:08:15.366943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327899838025:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:08:15.367189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327899838025:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:08:15.367286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327899838025:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:08:15.367378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327899838025:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:08:15.367468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327899838025:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:08:15.367585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327899838025:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:08:15.367686Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439873327899838025:2292];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;descr ... node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:08:15.430140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:08:15.430169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:08:15.430272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:08:15.430314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:08:15.430499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:08:15.430540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:08:15.430612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:08:15.430631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:08:15.430756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:08:15.430776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:08:15.430818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:08:15.430840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:08:15.431147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:08:15.431187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:08:15.431356Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:08:15.431376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:08:15.431494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:08:15.431533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:08:15.431716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:08:15.431748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:08:15.431831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:08:15.431847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:08:15.432325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:08:15.432377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:08:15.432499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:08:15.432526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:08:15.432693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:08:15.432717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:08:15.432884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:08:15.432920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:08:15.432990Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:08:15.433015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:08:15.433043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:08:15.433088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:08:15.433354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:08:15.433394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:08:15.433575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:08:15.433605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:08:15.433749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:08:15.433771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:08:15.433902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:08:15.433930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:08:15.434054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:08:15.434076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:08:15.498683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=5800;columns=5; 2024-11-21T23:08:18.466599Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873319309902885:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:18.466850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:18.622907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873340784740227:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:18.623187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:18.623596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873340784740262:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:18.644352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T23:08:18.698705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873340784740264:2406], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestRestartTablets >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] |80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |80.3%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |80.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |80.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> TSchemeShardMoveTest::Replace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:19.676100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:19.676208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:19.676252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:19.676314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:19.676357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:19.676400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:19.676495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:19.676910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:19.831951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:19.832011Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:19.872307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:19.876635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:19.876848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:19.918469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:19.925742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:19.926383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2024-11-21T23:08:19.926824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:19.964277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:19.965736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:19.965806Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:19.966025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:19.966104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:19.966147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:19.966247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:19.999369Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:20.350545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:20.350850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.351125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:20.351398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:20.351463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.363725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:20.363912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:20.364373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.364498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:20.364543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:20.364582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:20.367161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.367230Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:20.367265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:20.369436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.370135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.370543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.370617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.374859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:20.377288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:20.377501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:20.378617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:20.378797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:20.378845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.379155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:20.379220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.379408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:20.379503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:20.381724Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:20.381792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:20.381980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:20.382019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:20.382452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.382506Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:20.382600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:20.382643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.382692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:20.382735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.382776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:20.382805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:20.382866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:20.382904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:20.382934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:20.384865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:20.384964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:20.385002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:20.385038Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:20.385072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:20.385179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ARD_BACKUP INFO: [Export] [s3] Finish: self# [1:475:2435], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:08:21.096899Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:474:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:13088 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F441D4EB-CFE6-417D-B874-E160193EB227 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:08:21.120571Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:13088 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B7FD7272-99AE-4A6B-BE0E-9F1FAF0A0D6C amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2024-11-21T23:08:21.146645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:21.146722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:08:21.147006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:21.147059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:08:21.147525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.147596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:21.148303Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2024-11-21T23:08:21.148517Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:472:2432] 2024-11-21T23:08:21.148651Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:473:2434], sender# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T23:08:21.149204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:21.149325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 
72057594046678944, cookie: 102 2024-11-21T23:08:21.149364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:21.149400Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:08:21.149439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:08:21.149550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:13088 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3D7C7E6B-8383-4563-B1E0-8F9AF96222DC amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2024-11-21T23:08:21.168826Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2024-11-21T23:08:21.168893Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:08:21.169084Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:08:21.187071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:08:21.219866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:21.219954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:08:21.220118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:21.220232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:21.220297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:21.220465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable 
to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:21.220971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:21.221011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:21.221113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:21.221191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:21.221238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:21.221277Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.221324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:21.221378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:21.221420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:21.221522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:21.238913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.239387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.239808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.239861Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:21.240023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:08:21.240082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 102 ready parts: 1/1 2024-11-21T23:08:21.240124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:21.240214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2342] message: TxId: 102 2024-11-21T23:08:21.240280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:21.240319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:21.240352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:21.240502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:21.242737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:21.242808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:449:2411] TestWaitNotification: OK eventTxId 102 >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> TSchemeShardMoveTest::MoveMigratedTable >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TTypeCodecsTest::TestFixedLenCodec >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> DSProxyStrategyTest::Restore_block42 [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] Test command err: 2024-11-21T23:06:35.067450Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872898934869329:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:35.067499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002433/r3tmp/tmpzFYQIS/pdisk_1.dat 2024-11-21T23:06:35.591960Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:35.609335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:35.609439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:35.616862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62853, node 1 2024-11-21T23:06:35.653614Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:35.653650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:35.653657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:35.653741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62729 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:36.047916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:36.063555Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:06:36.105450Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:39.248964Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:39.249041Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872916114738908:2302], Start check tables existence, number paths: 2 2024-11-21T23:06:39.251898Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTI3MTJhYjEtNzQ2MTJlZTEtYjgyNTc1MmItMzE3N2IyNDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTI3MTJhYjEtNzQ2MTJlZTEtYjgyNTc1MmItMzE3N2IyNDU= 2024-11-21T23:06:39.252177Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTI3MTJhYjEtNzQ2MTJlZTEtYjgyNTc1MmItMzE3N2IyNDU=, ActorId: [1:7439872916114738909:2303], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:39.302441Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:39.302482Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:39.302519Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:06:39.302744Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872916114738908:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:39.302804Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872916114738908:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:39.302834Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872916114738908:2302], Successfully finished 2024-11-21T23:06:39.314075Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:39.322218Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872916114738926:2298], DatabaseId: Root, PoolId: 
sample_pool_id, Start pool creating 2024-11-21T23:06:39.326339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:39.332417Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872916114738926:2298], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2024-11-21T23:06:39.334010Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872916114738926:2298], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T23:06:39.341416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872916114738926:2298], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:06:39.426525Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872916114738926:2298], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:39.434300Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872916114738926:2298], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:06:39.434663Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2024-11-21T23:06:39.434684Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2024-11-21T23:06:39.434740Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872916114738984:2304], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:39.435794Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872916114738984:2304], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:39.435833Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2024-11-21T23:06:39.435854Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T23:06:39.436055Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872916114738993:2305], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T23:06:39.441651Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872916114738993:2305], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T23:06:39.454918Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T23:06:39.454939Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:06:39.454982Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872916114739005:2307], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T23:06:39.455104Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTI3MTJhYjEtNzQ2MTJlZTEtYjgyNTc1MmItMzE3N2IyNDU=, ActorId: [1:7439872916114738909:2303], ActorState: ReadyState, TraceId: 01jd8fmhev1z98d0k986y7fyyv, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: ALTER RESOURCE POOL sample_pool_id SET ( CONCURRENT_QUERY_LIMIT=42 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T23:06:39.460100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872916114739005:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:39.460200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:06:39.849872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:06:39.854538Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872916114738993:2305], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T23:06:39.854612Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872916114738993:2305], DatabaseId: Root, PoolId: sample_pool_id, Pool config has changed, queue size: -1, in flight limit: 42 2024-11-21T23:06:39.854840Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got resign request, DatabaseId: Root, PoolId: sample_pool_id 2024-11-21T23:06:39.854879Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872916114738993:2305], DatabaseId: Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2024-11-21T23:06:39.854918Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: Root, PoolId: sample_pool_id 2024-11-21T23:06:39.856869Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NTI3MTJhYjEtNzQ2MTJlZTEtYjgyNTc1MmItMzE3N2IyNDU=, ActorId: [1:7439872916114738909:2303], ActorState: ExecuteState, TraceId: 01jd8fmhev1z98d0k986y7fyyv, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7439872916114739006:2303] WorkloadServiceCleanup: 0 2024-11-21T23:06:39.858434Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTI3MTJhYjEtNzQ2MTJlZTEtYjgyNTc1MmItMzE3N2IyNDU=, ActorId: [1:7439872916114738909:2303], ActorState: CleanupState, TraceId: 01jd8fmhev1z98d0k986y7fyyv, EndCleanup, isFinal: 0 2024-11-21T23:06:39.858496Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NTI3MTJhYjEtNzQ2MTJlZTEtYjgyNTc1MmItMzE3N2IyNDU=, ActorId: [1:7439872916114738909:2303], ActorState: CleanupState, TraceId: 01jd8fmhev1z98d0k986y7fyyv, Sent query response back to proxy, proxyRequestId ... 
4367714449:2201] 2024-11-21T23:08:19.421754Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=9&id=ZmQ3ZDU3NGUtNDNmNTM5NWUtYzMyZTczOC04MGM2YjdmYw==, TxId: 2024-11-21T23:08:19.421875Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=9&id=ZmQ3ZDU3NGUtNDNmNTM5NWUtYzMyZTczOC04MGM2YjdmYw==, TxId: 2024-11-21T23:08:19.422493Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ZmQ3ZDU3NGUtNDNmNTM5NWUtYzMyZTczOC04MGM2YjdmYw==, ActorId: [9:7439873343281506807:2731], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:19.422548Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=ZmQ3ZDU3NGUtNDNmNTM5NWUtYzMyZTczOC04MGM2YjdmYw==, ActorId: [9:7439873343281506807:2731], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:19.422588Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZmQ3ZDU3NGUtNDNmNTM5NWUtYzMyZTczOC04MGM2YjdmYw==, ActorId: [9:7439873343281506807:2731], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:08:19.422627Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZmQ3ZDU3NGUtNDNmNTM5NWUtYzMyZTczOC04MGM2YjdmYw==, ActorId: [9:7439873343281506807:2731], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:19.422713Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=ZmQ3ZDU3NGUtNDNmNTM5NWUtYzMyZTczOC04MGM2YjdmYw==, ActorId: [9:7439873343281506807:2731], ActorState: unknown state, Session actor destroyed 2024-11-21T23:08:20.422995Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , Bootstrap. 
Database: /Root 2024-11-21T23:08:20.427179Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ= 2024-11-21T23:08:20.427591Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:08:20.428082Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T23:08:20.435016Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ReadyState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, received request, proxyRequestId: 77 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [9:7439873347576474153:2743] database: /Root databaseId: /Root pool id: default 2024-11-21T23:08:20.435071Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ReadyState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, request placed into pool from cache: default 2024-11-21T23:08:20.435154Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ReadyState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, Sending CompileQuery request 2024-11-21T23:08:20.439467Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, ExecutePhyTx, tx: 0x000050C0002AE098 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:20.439575Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, Sending to Executer TraceId: 0 8 2024-11-21T23:08:20.439685Z node 9 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, Created new KQP executer: [9:7439873347576474156:2742] isRollback: 0 2024-11-21T23:08:20.454930Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T23:08:20.455042Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, ExecutePhyTx, tx: 0x000050C0002AD858 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:20.456321Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:08:20.456515Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, txInfo Status: Committed Kind: ReadOnly TotalDuration: 17.282 ServerDuration: 17.083 QueriesCount: 2 2024-11-21T23:08:20.456661Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:20.456733Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:20.456766Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, EndCleanup, isFinal: 0 2024-11-21T23:08:20.456835Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ExecuteState, TraceId: 01jd8fqm2j2dyn5x6gf9jg38na, Sent query response back to proxy, proxyRequestId: 77, proxyId: [9:7439873184367714449:2201] 2024-11-21T23:08:20.458120Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, TxId: 2024-11-21T23:08:20.458251Z node 9 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, TxId: 2024-11-21T23:08:20.465821Z node 9 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:20.465884Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:20.465924Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:08:20.465959Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:20.466050Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=NjBlYjgwMmEtMmU0ZDZjZDMtZGI3NGUwYTktZWVkMTY2NGQ=, ActorId: [9:7439873347576474152:2742], ActorState: unknown state, Session actor destroyed 2024-11-21T23:08:20.471658Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MTRjYTFlNzYtNGYyODA1Zi05Yzk1NTI2OS0yYmQ0OWIxNg==, ActorId: [9:7439873201547584081:2302], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:20.471711Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MTRjYTFlNzYtNGYyODA1Zi05Yzk1NTI2OS0yYmQ0OWIxNg==, ActorId: [9:7439873201547584081:2302], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:20.471744Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTRjYTFlNzYtNGYyODA1Zi05Yzk1NTI2OS0yYmQ0OWIxNg==, ActorId: [9:7439873201547584081:2302], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:08:20.471773Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTRjYTFlNzYtNGYyODA1Zi05Yzk1NTI2OS0yYmQ0OWIxNg==, ActorId: [9:7439873201547584081:2302], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:20.471857Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTRjYTFlNzYtNGYyODA1Zi05Yzk1NTI2OS0yYmQ0OWIxNg==, ActorId: [9:7439873201547584081:2302], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2024-11-21T23:07:44.192879Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.192904Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.192941Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:44.193331Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:44.214635Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:44.214838Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.215253Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:44.215715Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.216468Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:44.217374Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:44.217425Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:07:44.218575Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.218595Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.218619Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:44.218937Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:44.226640Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:44.226813Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.227313Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:44.227731Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.227855Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:44.234506Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:44.234574Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:07:44.236972Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.236992Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.237016Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:44.237615Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:44.238582Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:44.238705Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.242621Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:44.243459Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.250692Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:44.254496Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:44.254579Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2024-11-21T23:07:44.259430Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.259462Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.266474Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:44.270672Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:44.271488Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:44.271643Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.274654Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:44.276274Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.276786Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:44.276871Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:44.276926Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:07:44.277902Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.277921Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.277938Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:44.278259Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:44.278926Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:44.279051Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.282597Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:44.283010Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.283128Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:44.285018Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:44.285071Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:07:44.285871Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.285898Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.285924Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:44.286525Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:07:44.290570Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:44.290690Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.295437Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:44.295891Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.296036Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:44.296103Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:44.296141Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:07:44.297083Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.297104Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.297122Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:44.297412Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:44.297983Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:44.298084Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.302745Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:44.303493Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.303672Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:44.304071Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:44.304112Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:07:44.304903Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.304925Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.304962Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:44.305182Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:44.305652Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:44.305745Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.305936Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:44.307509Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:44.308039Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:44.308113Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T23:07:44.308153Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:07:44.325787Z :ReadSession INFO: Random seed for debugging is 1732230464325744 2024-11-21T23:07:45.105925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873197167776257:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:45.106429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:45.170944Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873196821649361:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:45.171002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existen ... ession cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 got read request: guid# 203f3955-fa683843-190892b4-2a5a4cc 2024-11-21T23:08:08.159508Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2024-11-21T23:08:08.159649Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T23:08:08.159687Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T23:08:08.159726Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2024-11-21T23:08:08.177394Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:08:08.177434Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:08:08.177591Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_12755693865839162690_v1 2024-11-21T23:08:08.177743Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T23:08:08.188577Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:08:08.188639Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:08:08.188695Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T23:08:08.191012Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T23:08:08.191061Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2024-11-21T23:08:08.191111Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2024-11-21T23:08:08.194615Z :DEBUG: [/Root] [/Root] [68803854-99ebc063-5fd5d1cc-8c58dcbb] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2024-11-21T23:08:08.203464Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fa2388b0-bbdb814e-fe11f0a3-5fa608b5_0] Write session will now close 2024-11-21T23:08:08.203525Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fa2388b0-bbdb814e-fe11f0a3-5fa608b5_0] Write session: aborting 2024-11-21T23:08:08.204079Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fa2388b0-bbdb814e-fe11f0a3-5fa608b5_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:08:08.204121Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fa2388b0-bbdb814e-fe11f0a3-5fa608b5_0] Write session: destroy 2024-11-21T23:08:08.212279Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|fa2388b0-bbdb814e-fe11f0a3-5fa608b5_0 grpc read done: success: 0 data: 2024-11-21T23:08:08.212314Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|fa2388b0-bbdb814e-fe11f0a3-5fa608b5_0 grpc read failed 2024-11-21T23:08:08.212335Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|fa2388b0-bbdb814e-fe11f0a3-5fa608b5_0 grpc closed 2024-11-21T23:08:08.212350Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|fa2388b0-bbdb814e-fe11f0a3-5fa608b5_0 is DEAD 2024-11-21T23:08:08.213639Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:08:08.215094Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:08:08.215133Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439873295952026902:2584] destroyed 2024-11-21T23:08:08.215178Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T23:08:09.135030Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:08:10.338623Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T23:08:14.140117Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:08:18.138539Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T23:08:18.209042Z :INFO: [/Root] [/Root] [68803854-99ebc063-5fd5d1cc-8c58dcbb] Closing read session. Close timeout: 0.000000s 2024-11-21T23:08:18.209116Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T23:08:18.209167Z :INFO: [/Root] [/Root] [68803854-99ebc063-5fd5d1cc-8c58dcbb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16969 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:08:18.209270Z :NOTICE: [/Root] [/Root] [68803854-99ebc063-5fd5d1cc-8c58dcbb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:08:18.209424Z :DEBUG: [/Root] [/Root] [68803854-99ebc063-5fd5d1cc-8c58dcbb] [dc1] Abort session to cluster 2024-11-21T23:08:18.210046Z :NOTICE: [/Root] [/Root] [68803854-99ebc063-5fd5d1cc-8c58dcbb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:08:18.210892Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 grpc read done: success# 0, data# { } 2024-11-21T23:08:18.210929Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 grpc read failed 2024-11-21T23:08:18.210957Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 grpc closed 2024-11-21T23:08:18.210999Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_12755693865839162690_v1 is DEAD 2024-11-21T23:08:18.212401Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:08:18.212458Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_12755693865839162690_v1 2024-11-21T23:08:18.212495Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439873265887255323:2500] destroyed 2024-11-21T23:08:18.212557Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_12755693865839162690_v1 2024-11-21T23:08:18.213221Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439873265887255320:2497] disconnected; active server actors: 1 2024-11-21T23:08:18.213251Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439873265887255320:2497] client user disconnected session shared/user_1_1_12755693865839162690_v1 2024-11-21T23:08:18.695303Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439873338901700448:2677], TxId: 281474976720722, task: 1, CA Id [1:7439873338901700446:2677]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T23:08:18.731629Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439873338901700448:2677], TxId: 281474976720722, task: 1, CA Id [1:7439873338901700446:2677]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:08:18.786547Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439873338901700448:2677], TxId: 281474976720722, task: 1, CA Id [1:7439873338901700446:2677]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:08:18.859349Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439873338901700448:2677], TxId: 281474976720722, task: 1, CA Id [1:7439873338901700446:2677]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:08:18.945412Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439873338901700448:2677], TxId: 281474976720722, task: 1, CA Id [1:7439873338901700446:2677]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:08:19.070512Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439873338901700448:2677], TxId: 281474976720722, task: 1, CA Id [1:7439873338901700446:2677]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:08:20.018417Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:20.018454Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:20.018496Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:08:20.027210Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:08:20.027891Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:08:20.028088Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:20.030461Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:08:20.031305Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:08:20.031822Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:08:20.034620Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T23:08:20.034722Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:08:20.034791Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:08:20.034843Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T23:08:20.038670Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:08:20.038728Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:20.324426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:20.324555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:20.324609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:20.324650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:20.324701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:20.324753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:20.324827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:20.325178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:20.432683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:20.432757Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:20.463421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:20.467442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:20.467681Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:20.487075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:20.489933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:20.490818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:20.491235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:20.503418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:20.504932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:20.505004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:20.505215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:20.508988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:20.509103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:20.509237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.533567Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:20.797589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:20.798003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.798320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:20.799033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:20.799158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.804675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:20.804895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:20.805201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.805293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet72057594046678944 2024-11-21T23:08:20.805346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:20.805392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:20.807723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.807833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:20.807876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:20.810223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.810284Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.810338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.810420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.820216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:20.826887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:20.827177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:20.828702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:20.828903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:20.828956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.829333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:20.829408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.829632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:20.829725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:20.832267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T23:08:20.832324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:20.832549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:20.832591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:20.833016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.833093Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:20.833226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:20.833270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.833319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:20.833360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.833398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:20.833429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:20.833498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:20.833539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:20.833579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:20.835633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:20.835744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:20.835788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:20.835831Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:20.835884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:20.835994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Step: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:21.371777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:08:21.372031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T23:08:21.372829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:21.372970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:21.373042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:08:21.373181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T23:08:21.373347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:21.542285Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:409:2382], attempt# 0 2024-11-21T23:08:21.643217Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:409:2382], sender# [1:408:2381] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:21700 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8C8AC233-166F-4216-B0F5-C762DBB8CD55 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T23:08:21.662889Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:21700 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7D3DFBCB-86DC-4E8C-9D4E-289A1E0765E1 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:08:21.671772Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:08:21.675374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2024-11-21T23:08:21.675443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:08:21.675798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:21.675854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:08:21.676697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.676769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:21.677547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:21.677651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:21.677698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:21.677748Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:08:21.677798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:21.677909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:21700 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CA8FEC68-9E47-4178-89A6-BFA2424EDEDC amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T23:08:21.679855Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2024-11-21T23:08:21.680206Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:408:2381] 2024-11-21T23:08:21.680466Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:409:2382], sender# [1:408:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T23:08:21.682772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:21700 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 57E9052F-24CD-4C67-802D-E35F89546583 amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2024-11-21T23:08:21.688293Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2024-11-21T23:08:21.688390Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:409:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:08:21.688582Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:408:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:08:21.711867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T23:08:21.711951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:08:21.712134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T23:08:21.712270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T23:08:21.712341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:21.712379Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.712415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:21.712472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:21.712687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:21.723424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.723917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.723968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:21.724080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2024-11-21T23:08:21.724117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:21.724166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:21.724256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T23:08:21.724332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:21.724405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:21.724472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:21.724607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:21.731558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:21.731634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:394:2368] TestWaitNotification: OK eventTxId 102 >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] Test command err: 2024-11-21T23:06:36.942821Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872900867285146:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:36.943089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002401/r3tmp/tmpYxZS40/pdisk_1.dat 2024-11-21T23:06:37.973613Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:37.987051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:37.987155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:37.990567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:38.000253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10201, node 1 2024-11-21T23:06:38.213155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:38.213187Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:38.213194Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:38.213291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22015 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:38.663830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:38.771401Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:41.548951Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:41.561743Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTU4Y2FhNTEtNjg3NjcwYzktZWI1ODUzZWItNDJiZTQ3M2Q=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTU4Y2FhNTEtNjg3NjcwYzktZWI1ODUzZWItNDJiZTQ3M2Q= 2024-11-21T23:06:41.575744Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:06:41.575793Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:41.575826Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:41.576020Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872922342122109:2302], Start check tables existence, number paths: 2 2024-11-21T23:06:41.576119Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTU4Y2FhNTEtNjg3NjcwYzktZWI1ODUzZWItNDJiZTQ3M2Q=, ActorId: [1:7439872922342122113:2303], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:41.579192Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872922342122109:2302], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:41.579289Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872922342122109:2302], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:41.580715Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872922342122109:2302], Successfully finished 2024-11-21T23:06:41.580821Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:41.611379Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872922342122130:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:41.617426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:41.625181Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872922342122130:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2024-11-21T23:06:41.628438Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872922342122130:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T23:06:41.640435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872922342122130:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:06:41.723767Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872922342122130:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:41.730346Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872922342122130:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:06:41.738534Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872922342122188:2338], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:41.739940Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872922342122188:2338], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:41.791759Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OTU4Y2FhNTEtNjg3NjcwYzktZWI1ODUzZWItNDJiZTQ3M2Q=, ActorId: [1:7439872922342122113:2303], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:06:41.791835Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=OTU4Y2FhNTEtNjg3NjcwYzktZWI1ODUzZWItNDJiZTQ3M2Q=, ActorId: [1:7439872922342122113:2303], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:06:41.791865Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTU4Y2FhNTEtNjg3NjcwYzktZWI1ODUzZWItNDJiZTQ3M2Q=, ActorId: [1:7439872922342122113:2303], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:06:41.791890Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTU4Y2FhNTEtNjg3NjcwYzktZWI1ODUzZWItNDJiZTQ3M2Q=, ActorId: [1:7439872922342122113:2303], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:06:41.791992Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTU4Y2FhNTEtNjg3NjcwYzktZWI1ODUzZWItNDJiZTQ3M2Q=, ActorId: [1:7439872922342122113:2303], ActorState: unknown state, Session actor destroyed 2024-11-21T23:06:41.855012Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872900867285146:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:41.855120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:06:42.915831Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439872929645997022:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002401/r3tmp/tmphObJMj/pdisk_1.dat 2024-11-21T23:06:43.102613Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:06:43.251042Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:43.287944Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:43.288028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:43.295906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 4029, node 2 2024-11-21T23:06:43.526980Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:43.527008Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:43.527018Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:43.527124Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16445 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:43.975794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:43.986936Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:06:44.014899Z node 2 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available ... 
0 8 2024-11-21T23:08:15.752772Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfeh3qjy2ykjrf92g713, Created new KQP executer: [8:7439873325454889553:4591] isRollback: 0 2024-11-21T23:08:15.792231Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfeh3qjy2ykjrf92g713, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:08:15.792451Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfeh3qjy2ykjrf92g713, txInfo Status: Committed Kind: ReadWrite TotalDuration: 93.708 ServerDuration: 93.568 QueriesCount: 2 2024-11-21T23:08:15.792575Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfeh3qjy2ykjrf92g713, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:15.792648Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfeh3qjy2ykjrf92g713, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:15.792681Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfeh3qjy2ykjrf92g713, EndCleanup, isFinal: 0 2024-11-21T23:08:15.792741Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfeh3qjy2ykjrf92g713, Sent query response back to proxy, proxyRequestId: 478, proxyId: [8:7439873041987040198:2160] 2024-11-21T23:08:15.793308Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, TxId: 2024-11-21T23:08:15.793405Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2024-11-21T23:08:15.793990Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ReadyState, TraceId: 
01jd8fqfhh99a037rq1s7ds36j, received request, proxyRequestId: 479 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [8:7439873325454889562:4597] database: /Root databaseId: /Root pool id: default 2024-11-21T23:08:15.794022Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ReadyState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, request placed into pool from cache: default 2024-11-21T23:08:15.794092Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ReadyState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, Sending CompileQuery request 2024-11-21T23:08:15.796862Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, ExecutePhyTx, tx: 0x000050C00035F698 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:15.796931Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, Sending to Executer TraceId: 0 8 2024-11-21T23:08:15.797000Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, Created new KQP executer: [8:7439873325454889565:4591] isRollback: 0 2024-11-21T23:08:15.923442Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2024-11-21T23:08:15.923521Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, ExecutePhyTx, tx: 0x000050C00035F5D8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:15.949632Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:08:15.949799Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, txInfo Status: Committed Kind: ReadOnly TotalDuration: 153.074 ServerDuration: 152.947 
QueriesCount: 2 2024-11-21T23:08:15.949918Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:15.949977Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:15.950004Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, EndCleanup, isFinal: 0 2024-11-21T23:08:15.950055Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ExecuteState, TraceId: 01jd8fqfhh99a037rq1s7ds36j, Sent query response back to proxy, proxyRequestId: 479, proxyId: [8:7439873041987040198:2160] 2024-11-21T23:08:15.950870Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, TxId: 2024-11-21T23:08:15.950966Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, TxId: 2024-11-21T23:08:15.951162Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7439873067756844295:2304], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2024-11-21T23:08:15.951206Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:15.951246Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:15.951279Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:08:15.951311Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:15.951391Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NzRmY2U5ZjktMWU2MDQwNzktNGZjZTQyMDMtM2JmN2JlNmQ=, ActorId: [8:7439873325454889533:4591], ActorState: unknown state, Session actor destroyed 2024-11-21T23:08:16.107556Z node 8 :BS_PROXY_PUT ERROR: [a1831e176d9c009a] Result# 
TEvPutResult {Id# [72075186224037888:1:715:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:08:16.179450Z node 7 :BS_PROXY_PUT ERROR: [53b3ad4ccaecc0a8] Result# TEvPutResult {Id# [72075186224037889:1:818:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:08:20.214523Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7439873070334181100:2329], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh 2024-11-21T23:08:20.214641Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7439873070334181100:2329], DatabaseId: /Root, PoolId: sample_pool_id, Updated node info, noode count: 3 |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestZigZagCodec [GOOD] Test command err: Size: 8002 Create chunk: 0.004769s Read by index: 0.000028s Iterate: 0.000167s Size: 8256 Create chunk: 0.000219s Read by index: 0.000036s Iterate: 0.000085s Size: 8532 Create chunk: 0.000104s Read by index: 0.000053s Iterate: 0.000034s Size: 7769 Create chunk: 0.000111s Read by index: 0.000049s Iterate: 0.000043s Size: 2853 Create chunk: 0.000109s Read by index: 0.000083s Iterate: 0.000040s Size: 2419 Create chunk: 0.000100s Read by index: 0.000121s Iterate: 0.000056s Size: 2929 Create chunk: 0.000085s Read by index: 0.000074s Iterate: 0.000042s Size: 2472 Create chunk: 0.000112s Read by index: 0.000090s Iterate: 0.000044s >> TBackupTests::BackupUuidColumn[Raw] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] Test command err: 2024-11-21T23:07:36.631317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873158718752306:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:36.631365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd2/r3tmp/tmpA5Ko5F/pdisk_1.dat 2024-11-21T23:07:37.527612Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5157, node 1 2024-11-21T23:07:37.562414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:07:37.567545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:37.568287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:07:37.577379Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:37.577402Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:37.581779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:37.581874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2024-11-21T23:07:37.587800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:37.588287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:37.588330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:37.588378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:07:37.588436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:07:37.629903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:37.666915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:37.666939Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:37.666967Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:37.667072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
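For readability: the lease-refresh query that the workload service runs in the node 8 trace above (TRefreshPoolStateQuery::OnLeaseUpdated, around 23:08:15) is embedded there as a single log line. The same YQL text is reproduced here verbatim, reformatted only with line breaks and indentation, nothing added or changed:

    -- TRefreshPoolStateQuery::OnLeaseUpdated
    DECLARE $database_id AS Text;
    DECLARE $pool_id AS Text;

    SELECT COUNT(*) AS delayed_requests
    FROM `.metadata/workload_manager/delayed_requests`
    WHERE database = $database_id
      AND pool_id = $pool_id
      AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp())
      AND lease_deadline >= CurrentUtcTimestamp();

    SELECT COUNT(*) AS running_requests
    FROM `.metadata/workload_manager/running_requests`
    WHERE database = $database_id
      AND pool_id = $pool_id
      AND lease_deadline >= CurrentUtcTimestamp();
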
2024-11-21T23:07:38.028193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.034562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:38.034645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.040857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:38.041068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:38.041089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:38.043564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:38.043595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:38.045226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:38.048578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:38.051702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230458098, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:38.051758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:38.052081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:38.054661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:38.054884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.055019Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:38.055114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:38.055163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:38.055215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:38.061063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:38.061131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:38.061148Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:38.061270Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:41.107104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Logs, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.107882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:41.109389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:41.109423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.123753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Logs 2024-11-21T23:07:41.123999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:41.124261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:41.124384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:41.128447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:41.128498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:41.128520Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:41.129093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:41.129114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:41.129127Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:41.132015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:41.161474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.161847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.162085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.162217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.162330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply 
TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.162831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.162975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.163080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.163213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.163388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:41.163544Z node 1 ... RD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:08:19.843272Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 1 2024-11-21T23:08:19.843428Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T23:08:19.852005Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230499888, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:19.852070Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710660:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230499888, at schemeshard: 72057594046644480 2024-11-21T23:08:19.852256Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:0 128 -> 240 2024-11-21T23:08:19.852411Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710660:1 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230499888, at schemeshard: 72057594046644480 2024-11-21T23:08:19.852484Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:1 128 -> 240 2024-11-21T23:08:19.852550Z node 10 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710660:2 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230499888, at schemeshard: 72057594046644480 2024-11-21T23:08:19.852598Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:2 128 -> 240 2024-11-21T23:08:19.852675Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TCreateResourcePool TPropose, operationId: 281474976710660:3, HandleReply TEvOperationPlan: step# 1732230499888 2024-11-21T23:08:19.852735Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:3 128 -> 240 2024-11-21T23:08:19.855870Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:19.856508Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:19.856594Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:2 ProgressState 2024-11-21T23:08:19.856705Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:2 progress is 1/4 2024-11-21T23:08:19.856934Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:0 ProgressState 2024-11-21T23:08:19.856986Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#281474976710660:0 progress is 2/4 2024-11-21T23:08:19.857077Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:3 ProgressState 2024-11-21T23:08:19.857118Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:3 progress is 3/4 2024-11-21T23:08:19.857196Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710660:1 ProgressState 2024-11-21T23:08:19.857233Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710660:1 progress is 4/4 2024-11-21T23:08:19.857261Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2024-11-21T23:08:19.857298Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 2024-11-21T23:08:19.857313Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:2 2024-11-21T23:08:19.857325Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:3 2024-11-21T23:08:19.857346Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710660, publications: 5, subscribers: 1 2024-11-21T23:08:19.862704Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:08:19.862780Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 5, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:08:19.862811Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2024-11-21T23:08:19.863129Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:08:19.863150Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:08:19.863162Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:08:19.863286Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:08:19.863330Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:08:19.863342Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 5 2024-11-21T23:08:19.863467Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:08:19.863488Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:08:19.865973Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: 
[OwnerId: 72057594046644480, LocalPathId: 6], version: 5 2024-11-21T23:08:19.866283Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710660 2024-11-21T23:08:19.866331Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710660 2024-11-21T23:08:19.866345Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 2 2024-11-21T23:08:19.866426Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710660, subscribers: 1 2024-11-21T23:08:19.871615Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7439873343718760131:2386], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T23:08:19.928241Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710661:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:08:19.928472Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:08:19.936654Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin 2024-11-21T23:08:20.000611Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[10:7439873343718759902:2305];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T23:08:20.321713Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jd8fqked68c80j9b68dbwje7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YThhYjI3ZDMtNzY5ZTQzNjAtNGRjZmZhNDMtNmUwOWZjZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:20.499804Z node 10 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976710663 scanId: 1 version: {1732230500007:max} readable: {1732230500364:max} at tablet 72075186224037888 2024-11-21T23:08:20.500022Z node 10 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976710663 scanId: 1 at tablet 72075186224037888 2024-11-21T23:08:20.500703Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[10:7439873343718759902:2305];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710663;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1732230500007:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2024-11-21T23:08:20.515916Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[10:7439873343718759902:2305];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T23:08:20.842210Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[10:7439873343718759902:2305];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976710663;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1732230500007:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[date;dateTimeS;dateTimeU;id;stringToString;timestamp;utf8ToString;];};]; 2024-11-21T23:08:20.851647Z node 10 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[10:7439873343718759902:2305];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2024-11-21T23:08:20.853728Z node 10 :TX_COLUMNSHARD INFO: self_id=[10:7439873343718759930:2310];tablet_id=72075186224037888;parent=[10:7439873343718759902:2305];fline=manager.h:99;event=ask_data;request=request_id=4;3={portions_count=1};; 2024-11-21T23:08:20.855046Z node 10 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2024-11-21T23:08:20.860325Z node 10 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T23:08:20.881863Z node 10 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2024-11-21T23:08:20.921747Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230500007, txId: 18446744073709551615] shutting down 2024-11-21T23:08:21.014592Z node 10 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[10:7439873343718759902:2305];fline=actor.cpp:33;event=skip_flush_writing; >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TKesusTest::TestUnregisterProxy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:22.454894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:22.454996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:22.455049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:22.455092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:22.455146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:22.455182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:22.455244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:22.455644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:22.534140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:22.534197Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:22.577297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:22.581263Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:22.581484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:22.624415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:22.630812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:22.631377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:22.631733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:22.642755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:22.644173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:22.644248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:22.644483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:22.644553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:22.644618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:22.644717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:22.658633Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:22.796293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:22.796632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:22.796868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:22.797126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:22.797193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:22.813066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:22.813253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:22.813476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:22.813542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:22.813573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:22.813603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:22.827367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:22.827445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:22.827486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:22.835316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:22.835413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:22.835479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:22.835542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:22.839221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:22.847377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:22.847616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:22.848785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:22.848953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:22.849002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:22.849294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:22.849372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:22.849554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:22.849650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:22.854381Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:22.854470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:22.854703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:22.854750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:22.855155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:22.855222Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:22.855323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:22.855373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:22.855421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:22.855468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:22.855507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:22.855543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:22.855626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:22.855662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:22.855695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:22.862483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:22.862689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:22.862756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:22.862817Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:22.862863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:22.863011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
xId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:23.279885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:08:23.280020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T23:08:23.280741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:23.280859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:23.280918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:08:23.281033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T23:08:23.281178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:23.485114Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:405:2378], attempt# 0 2024-11-21T23:08:23.503407Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:405:2378], sender# [1:404:2377] 2024-11-21T23:08:23.511429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:23.511505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:3331 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5412D775-A2A9-4511-A111-13405777CFEF amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T23:08:23.512802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:23.512865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:08:23.513423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:23.513485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:23.514179Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:23.514292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:23.514342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:23.514382Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:08:23.514436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:23.514512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T23:08:23.514733Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:3331 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 178754BF-49B6-414F-9A04-81C31B6ADCD6 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:08:23.534499Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } 2024-11-21T23:08:23.536445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:3331 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5D6BB8F4-8595-45FE-A7E4-BBB9FDA81630 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T23:08:23.546825Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T23:08:23.546956Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:404:2377] 2024-11-21T23:08:23.547063Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:405:2378], sender# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:3331 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 02B0FAF0-6105-4FEA-AEDF-63A77B73B532 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2024-11-21T23:08:23.550645Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:405:2378], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2024-11-21T23:08:23.550747Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:405:2378], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:08:23.550922Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:404:2377], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:08:23.560560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:23.560650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:08:23.560844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:23.560959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:23.561051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:23.561086Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:23.561123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:23.561162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:23.561401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:23.569900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:23.570128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:23.570173Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:23.570321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2024-11-21T23:08:23.570364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:23.570432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:23.570551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T23:08:23.570606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:23.570644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:23.570677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:23.570823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:23.580993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:23.581062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:390:2364] TestWaitNotification: OK eventTxId 102 |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_block42 [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestAcquireLocks >> TKesusTest::TestAcquireUpgrade |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TTableProfileTests::OverwriteStoragePolicy [GOOD] >> TTableProfileTests::WrongTableProfile >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> TSchemeShardMoveTest::Replace [GOOD] >> TKesusTest::TestAttachNewSessions >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> TBackupTests::BackupUuidColumn[Raw] [GOOD] |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestAttachOutOfSequence >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:23.811404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:23.811521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:23.811572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:23.811610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:23.811658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:23.811684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:23.811746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:23.812092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:23.889622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:23.889693Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:23.924913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:23.929109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:23.929327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:23.960812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:23.981600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:23.982372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:23.982919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:24.017058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:24.018998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:24.019101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:24.019360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:24.019460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:24.019521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T23:08:24.019663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.039522Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:24.268053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:24.268352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.268642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:24.268919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:24.268974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.274295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:24.274481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:24.274725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.274794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:24.274859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:24.274913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:24.277529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.277601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:24.277639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:24.280019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.280099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.280167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:24.280241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:24.286071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 
18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:24.290630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:24.290896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:24.291983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:24.292143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:24.292204Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:24.292516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:24.292567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:24.292748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:24.292841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:24.295338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:24.295382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:24.295663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:24.295708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:24.296123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.296185Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:24.296289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:24.296320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:24.296374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:24.296432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:24.296484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:24.296514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:24.296589Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:24.296684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:24.296719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:24.302835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:24.303012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:24.303342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:24.303397Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:24.303437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:24.303561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T23:08:26.515503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T23:08:26.515741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2024-11-21T23:08:26.516359Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 2024-11-21T23:08:26.516641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.516811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 Forgetting tablet 72075186233409547 2024-11-21T23:08:26.522767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409546 disconnected Forgetting tablet 72075186233409548 2024-11-21T23:08:26.523176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409547 disconnected 2024-11-21T23:08:26.523669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409548 disconnected 2024-11-21T23:08:26.525014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:08:26.525084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1392:3164] 
2024-11-21T23:08:26.525326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046678944 2024-11-21T23:08:26.525425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:08:26.525473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2024-11-21T23:08:26.525564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2024-11-21T23:08:26.525610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2024-11-21T23:08:26.525642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 2024-11-21T23:08:26.525673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2024-11-21T23:08:26.525703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2024-11-21T23:08:26.525724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2024-11-21T23:08:26.525750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2024-11-21T23:08:26.525774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2024-11-21T23:08:26.525803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:26.528805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T23:08:26.528854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2024-11-21T23:08:26.529213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T23:08:26.529234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T23:08:26.529330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T23:08:26.529356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2024-11-21T23:08:26.529694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2024-11-21T23:08:26.530220Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T23:08:26.530302Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T23:08:26.530386Z node 1 :HIVE INFO: [72057594037968897] 
TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2024-11-21T23:08:26.531050Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:26.531242Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 237us result status StatusPathDoesNotExist 2024-11-21T23:08:26.531403Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:08:26.531958Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:26.532180Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 253us result status StatusSuccess 2024-11-21T23:08:26.532708Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:26.533661Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:26.533843Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 200us result status StatusSuccess 2024-11-21T23:08:26.534179Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 21 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 21 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 19 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestKesusConfig >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionStealLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:25.257279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:25.257375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:25.257426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:25.257476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:25.257518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:25.257561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:25.257625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:25.258741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:25.351094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:25.351150Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:25.397727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:25.401878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:25.402124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:25.427384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:25.428231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:25.428983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:25.429289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:25.434508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:25.435982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:25.436053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:25.436281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:25.436363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:25.436412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:25.436556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.444574Z 
node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:25.606997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:25.607280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.607540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:25.607774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:25.607829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.612029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:25.612234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:25.612474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.612544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:25.612586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:25.612619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:25.614920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.614988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:25.615030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:25.617186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.617259Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.617312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:25.617383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:25.637715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:25.651382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:25.651665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:25.652921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:25.653074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:25.653117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:25.653420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:25.653495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:25.653682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:25.653770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:25.656123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:25.656168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:25.656354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:25.656404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:25.656830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.656881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:25.656991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:25.657038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:25.657089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:25.657142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:25.657177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:25.657207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:25.657282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T23:08:25.657327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:25.657377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:25.659452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:25.659568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:25.659610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:25.659646Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:25.659684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:25.659798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 4, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:08:26.488630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:26.488692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:08:26.489391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.489447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:26.489976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:26.490062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:26.490096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:26.490134Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:08:26.490181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:08:26.490268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:26.495363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:16519 Accept: */* Connection: Upgrade, 
HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EB1A7058-5639-45BB-82C0-BF7A42C7F66A amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:08:26.501761Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:475:2435], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2024-11-21T23:08:26.501831Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:475:2435], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:08:26.501969Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:474:2433], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:08:26.508444Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:16519 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DD087B08-F72A-40DF-A637-72486AA21F6D amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2024-11-21T23:08:26.519649Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2024-11-21T23:08:26.519759Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:472:2432] 2024-11-21T23:08:26.519892Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:473:2434], sender# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:16519 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9F71775D-0B80-453E-9995-F8B373AAF8EF amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2024-11-21T23:08:26.530043Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:473:2434], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2024-11-21T23:08:26.530130Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:473:2434], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:08:26.530329Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:472:2432], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:08:26.548845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 
72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.548932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:26.549109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.549243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.549313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.549449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:26.553043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.575756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.575823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:08:26.575995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.576103Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 319 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.576158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.576203Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.576264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema 
changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:26.576305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:26.576331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:26.576544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:26.578711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.579184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.579248Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:26.579364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:08:26.579400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:26.579451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:26.579527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:379:2342] message: TxId: 102 2024-11-21T23:08:26.579606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:26.579647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:26.579677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:26.579825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:26.582123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:26.582187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:449:2411] TestWaitNotification: OK eventTxId 102 >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |80.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TVersions::Wreck1 [GOOD] >> 
TVersions::Wreck1Reverse >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup-EvWrite >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:25.973019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:25.973166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:25.973263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:25.973309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:25.973364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:25.973418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:25.973495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:25.973891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:26.094936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:26.095013Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:26.127777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:26.134237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:26.134523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:26.174408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:26.178376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:26.179534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.180028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:26.186917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:26.188683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:26.188777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:26.189053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:26.189153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain 
is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:26.189233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:26.189339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.198245Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:26.377856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:26.378143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.378426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:26.378702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:26.378781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.381483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.381678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:26.381912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.381994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:26.382034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:26.382074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:26.384506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.384604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:26.384653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:26.386765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.386856Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.386923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:26.386984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:26.391399Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:26.393926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:26.394178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:26.395367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.395542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:26.395599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:26.395920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:26.395994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:26.396200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:26.396291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:26.404038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:26.404112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:26.404319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:26.404363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:26.404813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.404881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:26.405007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:26.405062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:26.405113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:26.405159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:26.405205Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:26.405236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:26.405327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:26.405367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:26.405404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:26.411834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:26.412031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:26.412085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:26.412131Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:26.412179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:26.412348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:26.741036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:08:26.741197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T23:08:26.741864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.741997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:26.742059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:08:26.742200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T23:08:26.742358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:26.936951Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:409:2382], attempt# 0 2024-11-21T23:08:27.018922Z node 1 :DATASHARD_BACKUP 
DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:409:2382], sender# [1:408:2381] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:18022 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E74A1D04-7581-4DC8-B080-B650AA43B749 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T23:08:27.027493Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:18022 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C6B34EF8-A065-4905-AD15-294CF8B5128C amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:08:27.037213Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:08:27.038658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:27.038747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:08:27.039063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:27.039116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:08:27.039413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:27.039477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:27.040920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:27.041040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:27.041101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:27.041153Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:08:27.041196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 
2024-11-21T23:08:27.041286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:18022 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 68F2338D-B093-4B9B-9BC2-27B1C20BC390 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T23:08:27.044426Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2024-11-21T23:08:27.044869Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:408:2381] 2024-11-21T23:08:27.045002Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:409:2382], sender# [1:408:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } 2024-11-21T23:08:27.049067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:18022 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5EDA2DC7-C203-44F9-A232-109815408CB8 amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2024-11-21T23:08:27.053871Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2024-11-21T23:08:27.053968Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:409:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:08:27.054185Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:408:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:08:27.080930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T23:08:27.081016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:08:27.081221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T23:08:27.081362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: 
"" BytesProcessed: 20 RowsProcessed: 1 } 2024-11-21T23:08:27.081448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:27.081498Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:27.081541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:27.081588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:27.081821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:27.088299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:27.088798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:27.088862Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:27.088999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:08:27.089045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:27.089106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:27.089208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T23:08:27.089305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:27.089373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:27.089418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:27.089560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:27.094486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:27.094554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:394:2368] TestWaitNotification: OK eventTxId 102 >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:24.598451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:24.598566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:24.598631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:24.598673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:24.598740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:24.598776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:24.598840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:24.599211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:24.688302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:24.688385Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:24.710224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:24.714234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:24.715185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:24.733335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:24.735161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:24.735669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:24.735941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:24.740388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:24.741819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:24.741877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:24.742032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:24.742084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:24.742131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:24.742240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.749127Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:24.896632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:24.896881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.897106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:24.897340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:24.897395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.912387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:24.912567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:24.912796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.912877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:24.912924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:24.912962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:24.925356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.925472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:24.925521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:24.935926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.936041Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.936129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:24.936228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:24.940850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:24.944000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:24.944280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:24.945441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:24.945600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:24.945658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:24.945932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:24.945984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:24.946148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:24.946231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:24.950727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:24.950779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:24.951060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:24.951108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:24.951506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:24.951562Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:24.951660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:24.951697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:24.951745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:24.951793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:24.951829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:24.951860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:24.951955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:24.952049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:24.952082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:24.954003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:24.954127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:24.954235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:24.954275Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:24.954315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:24.962221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... let# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 861 } } 2024-11-21T23:08:27.776646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1247 } } 2024-11-21T23:08:27.776680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:27.776775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1247 } } 2024-11-21T23:08:27.776844Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1247 } } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:27.777721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:27.777780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2024-11-21T23:08:27.777933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:27.777995Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:27.778083Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 8589936894 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:27.778181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:27.778225Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T23:08:27.778263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:27.778300Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2024-11-21T23:08:27.780349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 8589936898 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:27.780396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:27.780510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 8589936898 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:27.780546Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:27.780630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 323 RawX2: 8589936898 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:27.780685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:27.780716Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:27.780746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:27.780780Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:27.791310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T23:08:27.792347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:27.793910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T23:08:27.794305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:27.794541Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T23:08:27.794594Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:27.794646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T23:08:27.794764Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2024-11-21T23:08:27.794804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2024-11-21T23:08:27.794856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2024-11-21T23:08:27.795171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:27.795204Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:27.795237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T23:08:27.795297Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2024-11-21T23:08:27.795321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2024-11-21T23:08:27.795354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2024-11-21T23:08:27.795431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:380:2345] message: TxId: 102 2024-11-21T23:08:27.795481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2024-11-21T23:08:27.795525Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:27.795570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:27.795695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T23:08:27.795730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:08:27.795777Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2024-11-21T23:08:27.795799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2024-11-21T23:08:27.795829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T23:08:27.795851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:08:27.795874Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2024-11-21T23:08:27.795892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2024-11-21T23:08:27.795934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T23:08:27.795960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T23:08:27.796265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:08:27.796315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:08:27.796386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:08:27.796427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:08:27.796459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:08:27.796507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:08:27.796537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:27.815751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:27.815828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:463:2427] 2024-11-21T23:08:27.815951Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:25.717553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:25.717640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:25.717677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:25.717726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:25.717780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:25.717815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:25.717870Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:25.718209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:25.791013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:25.791076Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:25.827265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:25.831153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:25.831362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:25.848225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:25.848965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:25.849572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:25.849881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:25.859291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:25.860744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:25.860817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:25.861013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:25.861118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:25.861173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:25.861277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.877378Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:26.164283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:26.164594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.164864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:26.165103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:26.165160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.176153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.176373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:26.176649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.176722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:26.176756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:26.176792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:26.185340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.185467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:26.185520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:26.187755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.187827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.187912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:26.187971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:26.191576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:26.193636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:26.193878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:26.195029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.195183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:26.195230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:26.195499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:26.195569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:26.195755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:26.195839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:26.198068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:26.198118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:26.198315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:26.198361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:26.198813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.198881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:26.198988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:26.199044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:26.199096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:26.199135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:26.199171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:26.199198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:26.199264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:26.199313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:26.199343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:26.201106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:26.201232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:26.201270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:26.201311Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:26.201348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:26.201448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
r ... Step: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:26.627358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:08:26.627513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T23:08:26.628255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.628373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:26.628430Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:08:26.628603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T23:08:26.628762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:26.803268Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:409:2382], attempt# 0 2024-11-21T23:08:26.904373Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:409:2382], sender# [1:408:2381] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:14717 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 379E16DF-15A5-4087-AD05-52181F4ED665 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T23:08:26.915676Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:08:26.920055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:26.920124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:08:26.920435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:26.920508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:08:26.920667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.920726Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:26.922080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:26.922205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:26.922257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:26.922299Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:08:26.922344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:26.922466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:14717 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AEC1B3DD-3E17-4FD6-A3EE-7CEF285DDC62 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:08:26.925930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:08:26.930739Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:14717 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2B458A92-5D62-476D-B20D-FDDA0944CE4E amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T23:08:26.936005Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T23:08:26.936116Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:408:2381] 2024-11-21T23:08:26.936347Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:409:2382], sender# [1:408:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:14717 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 61DBA3EB-E0C9-494E-A851-F1E244E4D15C amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2024-11-21T23:08:26.940363Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:409:2382], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2024-11-21T23:08:26.940446Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:409:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:08:26.940702Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:408:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:08:26.955070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.955161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:08:26.955360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.955495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:08:26.955579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:26.955628Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.955670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:26.955721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:26.955919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:26.958366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.958823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:26.958879Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:26.958999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2024-11-21T23:08:26.959044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:26.959108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:26.959213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T23:08:26.959274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:26.959319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:26.959349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:26.959483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:26.961742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:26.961805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:394:2368] TestWaitNotification: OK eventTxId 102 >> DataShardTxOrder::ForceOnlineBetweenOnline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] Test command err: 2024-11-21T23:06:39.352515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872916181293365:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:39.352560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023e4/r3tmp/tmpEkypMV/pdisk_1.dat 2024-11-21T23:06:40.385147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:40.983992Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:41.004915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:41.005011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:41.031869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16717, node 1 2024-11-21T23:06:41.458612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:41.458642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:41.458652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:41.458790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:42.331121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:06:42.488959Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:44.354620Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872916181293365:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:44.354719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:06:45.447058Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:45.447490Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:45.447502Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:45.447571Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 3 2024-11-21T23:06:45.449878Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2JjNjZkMGEtOGNlYjczYzUtZmQ5NzE3ODItOTcyZDhmNzk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Y2JjNjZkMGEtOGNlYjczYzUtZmQ5NzE3ODItOTcyZDhmNzk= 2024-11-21T23:06:45.461466Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872941951098009:2304], Start check tables existence, number paths: 2 2024-11-21T23:06:45.461554Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2JjNjZkMGEtOGNlYjczYzUtZmQ5NzE3ODItOTcyZDhmNzk=, ActorId: [1:7439872941951098012:2305], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:45.463120Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872941951098009:2304], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:45.463179Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872941951098009:2304], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:45.463208Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872941951098009:2304], Successfully finished 2024-11-21T23:06:45.463325Z node 1 
:KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:45.532205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:06:45.587028Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439872942370813622:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:45.698835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:45.698920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:45.737817Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:06:45.740968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:45.753582Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:06:46.076620Z node 3 :STATISTICS WARN: [72075186224037897] TTxInit::Complete. EnableColumnStatistics=false 2024-11-21T23:06:46.087559Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:46.299796Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:46.299866Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:46.351710Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:46.570382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:06:46.884312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:46.884421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:46.907494Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:06:46.908455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:47.370576Z node 2 :STATISTICS WARN: [72075186224037907] TTxInit::Complete. 
EnableColumnStatistics=false 2024-11-21T23:06:47.372396Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:47.577826Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:47.579821Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:47.590564Z node 2 :HIVE WARN: HIVE#72075186224037898 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:47.707540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:06:47.772697Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:06:47.772916Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:06:47.772995Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:06:47.773091Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:06:47.773150Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:06:47.773208Z node 2 :HIVE WARN: HIVE#72075186224037898 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:06:47.943264Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:50.556203Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439872942370813622:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:50.556269Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:06:53.342034Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:53.342101Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:53.342124Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439872975838516427:2346], Start check tables existence, number paths: 2 2024-11-21T23:06:53.342179Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:53.342902Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7439872975838516440:2348], Database: /Root/test-serverless, Start database fetching 2024-11-21T23:06:53.343434Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7439872975838516440:2348], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2024-11-21T23:06:53.345447Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=ZjhjOGVjZWEtNDQ5ODRkZDctMTE1YjEwOWMtNjk2YjBhM2Q=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjhjOGVjZWEtNDQ5ODRkZDctMTE1YjEwOWMtNjk2YjBhM2Q= 2024-11-21T23:06:53.345720Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439872975838516427:2346], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:53.345778Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: 
[WorkloadService] [TCleanupTablesActor] ActorId: [2:7439872975838516427:2346], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:53.345810Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadServic ... rId: [7:7439873368133118223:2873], ActorState: ExecuteState, TraceId: 01jd8fqs927mx8wdmzvtv6wh9p, txInfo Status: Committed Kind: Pure TotalDuration: 70.342 ServerDuration: 70.233 QueriesCount: 2 2024-11-21T23:08:26.074866Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZjQ1Y2VhOTYtNTY2NTdjYS01ZTIxMDM5OC02MzRiODZkYg==, ActorId: [7:7439873368133118223:2873], ActorState: ExecuteState, TraceId: 01jd8fqs927mx8wdmzvtv6wh9p, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:26.075141Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ZjQ1Y2VhOTYtNTY2NTdjYS01ZTIxMDM5OC02MzRiODZkYg==, ActorId: [7:7439873368133118223:2873], ActorState: ExecuteState, TraceId: 01jd8fqs927mx8wdmzvtv6wh9p, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:26.075202Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZjQ1Y2VhOTYtNTY2NTdjYS01ZTIxMDM5OC02MzRiODZkYg==, ActorId: [7:7439873368133118223:2873], ActorState: ExecuteState, TraceId: 01jd8fqs927mx8wdmzvtv6wh9p, EndCleanup, isFinal: 1 2024-11-21T23:08:26.075301Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZjQ1Y2VhOTYtNTY2NTdjYS01ZTIxMDM5OC02MzRiODZkYg==, ActorId: [7:7439873368133118223:2873], ActorState: ExecuteState, TraceId: 01jd8fqs927mx8wdmzvtv6wh9p, Sent query response back to proxy, proxyRequestId: 94, proxyId: [7:7439873265053900361:2085] 2024-11-21T23:08:26.075346Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZjQ1Y2VhOTYtNTY2NTdjYS01ZTIxMDM5OC02MzRiODZkYg==, ActorId: [7:7439873368133118223:2873], ActorState: unknown state, TraceId: 01jd8fqs927mx8wdmzvtv6wh9p, Cleanup temp tables: 0 2024-11-21T23:08:26.076841Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ZjQ1Y2VhOTYtNTY2NTdjYS01ZTIxMDM5OC02MzRiODZkYg==, ActorId: [7:7439873368133118223:2873], ActorState: unknown state, TraceId: 01jd8fqs927mx8wdmzvtv6wh9p, Session actor destroyed 2024-11-21T23:08:26.087187Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, acquire mvcc snapshot 2024-11-21T23:08:26.087374Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTg4ZTAwOWYtOTYzYzcyZjAtNjAzNzFmZmItZDNjMjBlM2M=, ActorId: [7:7439873290823704727:2305], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:26.087416Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=OTg4ZTAwOWYtOTYzYzcyZjAtNjAzNzFmZmItZDNjMjBlM2M=, ActorId: [7:7439873290823704727:2305], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:26.087444Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTg4ZTAwOWYtOTYzYzcyZjAtNjAzNzFmZmItZDNjMjBlM2M=, ActorId: [7:7439873290823704727:2305], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:08:26.087812Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTg4ZTAwOWYtOTYzYzcyZjAtNjAzNzFmZmItZDNjMjBlM2M=, ActorId: 
[7:7439873290823704727:2305], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:26.087901Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=OTg4ZTAwOWYtOTYzYzcyZjAtNjAzNzFmZmItZDNjMjBlM2M=, ActorId: [7:7439873290823704727:2305], ActorState: unknown state, Session actor destroyed 2024-11-21T23:08:26.099258Z node 7 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, read snapshot result: UNAVAILABLE, step: 1732230506000, tx id: 18446744073709551615 2024-11-21T23:08:26.099339Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, ExecutePhyTx, tx: 0x000050C000195058 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:26.099385Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, Sending to Executer TraceId: 0 8 2024-11-21T23:08:26.099464Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, Created new KQP executer: [7:7439873372428085550:2868] isRollback: 0 2024-11-21T23:08:26.108778Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T23:08:26.108931Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 21.754 QueriesCount: 2 2024-11-21T23:08:26.109123Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:26.109474Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:26.109507Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, EndCleanup, isFinal: 0 2024-11-21T23:08:26.109567Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ExecuteState, TraceId: 01jd8fqs6pfawkf711eeceqc5s, Sent query response back to proxy, proxyRequestId: 93, proxyId: [7:7439873265053900361:2085] 2024-11-21T23:08:26.111057Z node 7 
:KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:08:26.111149Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ReadyState, Sending to Executer TraceId: 0 8 2024-11-21T23:08:26.111224Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ReadyState, Created new KQP executer: [7:7439873372428085561:2868] isRollback: 1 2024-11-21T23:08:26.111272Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:26.111826Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: CleanupState, EndCleanup, isFinal: 1 2024-11-21T23:08:26.111885Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:08:26.112025Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YTU5ZmIxYmUtOGM0OGEyMWYtYTNjYjAzMmYtY2RjYWJmNDE=, ActorId: [7:7439873368133118209:2868], ActorState: unknown state, Session actor destroyed 2024-11-21T23:08:26.362773Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, acquire mvcc snapshot 2024-11-21T23:08:26.367204Z node 7 :KQP_SESSION TRACE: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, read snapshot result: UNAVAILABLE, step: 1732230506146, tx id: 18446744073709551615 2024-11-21T23:08:26.367282Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, ExecutePhyTx, tx: 0x000050C0000A4158 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2024-11-21T23:08:26.367327Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, Sending to Executer TraceId: 0 8 2024-11-21T23:08:26.367419Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, Created new KQP executer: [7:7439873372428085574:2876] isRollback: 0 2024-11-21T23:08:26.373711Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, TEvTxResponse, CurrentTx: 1/1 
response.status: SUCCESS 2024-11-21T23:08:26.373822Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 11.065 QueriesCount: 2 2024-11-21T23:08:26.373923Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:08:26.374214Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:26.374233Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, EndCleanup, isFinal: 0 2024-11-21T23:08:26.374278Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=YjVjYWRjNmItZDhjNjYxMzEtYWZkMDEzMDAtOWM4YTIy, ActorId: [7:7439873372428085528:2876], ActorState: ExecuteState, TraceId: 01jd8fqsh7dyx77fzyd67cpx3s, Sent query response back to proxy, proxyRequestId: 96, proxyId: [7:7439873265053900361:2085] |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate+StreamLookup >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> DataShardScan::ScanFollowedByUpdate >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> KqpDataIntegrityTrails::UpsertEvWrite [FAIL] >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> TKesusTest::TestAttachFastPathBlocked [GOOD] |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |80.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/olap/unittest >> KqpOlapBlobsSharing::BlobsSharingSplit1_3_2_1_clean Test command err: Trying to start YDB, gRPC: 25733, MsgBus: 17251 2024-11-21T23:08:13.759154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873318733280915:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:13.766802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00126c/r3tmp/tmpv0dDs0/pdisk_1.dat 2024-11-21T23:08:14.358955Z node 1 :IMPORT WARN: Table profiles were not 
loaded TServer::EnableGrpc on GrpcPort 25733, node 1 2024-11-21T23:08:14.532792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:08:14.532814Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:08:14.532821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:08:14.532925Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:08:14.572106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:14.572217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:14.574506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17251 TClient is connected to server localhost:17251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:08:15.228258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:08:15.352027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2024-11-21T23:08:15.427896Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:08:15.476270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:08:15.476440Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2024-11-21T23:08:15.485935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:08:15.486344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:08:15.486645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:08:15.486770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:08:15.486871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:08:15.486992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:08:15.487088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:08:15.487194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:08:15.487311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:08:15.487425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:08:15.487536Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:08:15.487630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:08:15.495379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:08:15.524650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:08:15.524791Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2024-11-21T23:08:15.536313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:08:15.536389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:08:15.536640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:08:15.536771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:08:15.536908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:08:15.537052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:08:15.537169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:08:15.537263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:08:15.537361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:08:15.537490Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:08:15.537636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:08:15.537740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:08:15.545349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:08:15.588682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:08:15.594653Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2024-11-21T23:08:15.604962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:08:15.605038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:08:15.605323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normal ... 
327323216033:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:08:26.708403Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:08:26.710173Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732230506000 at tablet 72075186224037889 2024-11-21T23:08:26.710216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T23:08:26.710265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:08:26.710301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T23:08:26.710335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:08:26.710361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:26.718559Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:26.718641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:08:26.718721Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:7439873327323216035:2291];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:08:26.718886Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732230506000 at tablet 72075186224037888 2024-11-21T23:08:26.718927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T23:08:26.718986Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 
2024-11-21T23:08:26.719027Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T23:08:26.719073Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:08:26.719102Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:26.719119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:26.719146Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:08:26.719192Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:7439873327323216034:2290];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:08:26.719263Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732230506000 at tablet 72075186224037890 2024-11-21T23:08:26.719282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439873327323216066:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T23:08:26.719313Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439873327323216066:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:08:26.719335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439873327323216066:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T23:08:26.719357Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439873327323216066:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:08:26.719372Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439873327323216066:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:26.719385Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439873327323216066:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:26.719399Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439873327323216066:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:08:26.719435Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:7439873327323216066:2292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:08:26.719492Z node 1 :TX_COLUMNSHARD DEBUG: Notified by mediator time cast with PlanStep# 1732230506000 at tablet 72075186224037891 2024-11-21T23:08:26.719507Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=1; 2024-11-21T23:08:26.719542Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:08:26.719565Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=0; 2024-11-21T23:08:26.719584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:08:26.719610Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:26.719626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:08:26.719642Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:08:26.719677Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:7439873327323216033:2289];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:08:26.750651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:7439873327323216033:2289];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T23:08:26.750716Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:7439873327323216035:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T23:08:26.750743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:7439873327323216034:2290];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T23:08:26.750769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:7439873327323216066:2292];fline=actor.cpp:33;event=skip_flush_writing; 3. 
/-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:199: Execute @ 0x17905596 4. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:258: Execute_ @ 0x1790AEE3 5. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: operator() @ 0x1792F137 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x1792F137 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19:1) &> @ 0x1792F137 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x1792F137 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x1792F137 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x184029C8 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x184029C8 12. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x184029C8 13. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x183CA638 14. /-S/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp:19: Execute @ 0x1792E303 15. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x183CBF05 16. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x183FC60C 17. ??:0: ?? @ 0x7F3FCB61CD8F 18. ??:0: ?? @ 0x7F3FCB61CE3F 19. ??:0: ?? @ 0x156DC028 >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2024-11-21T23:08:27.451785Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:27.451906Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:27.474483Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:27.474604Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:27.507103Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:27.507640Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=15493207644954550541, session=0, seqNo=0) 2024-11-21T23:08:27.507816Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:27.522802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=15493207644954550541, session=1) 2024-11-21T23:08:27.523151Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=13542064367757458884, session=0, seqNo=0) 2024-11-21T23:08:27.523312Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T23:08:27.537650Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=13542064367757458884, session=2) 2024-11-21T23:08:28.059952Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:28.060043Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:28.076117Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:28.076405Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.104261Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 
2024-11-21T23:08:28.104720Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=11973970011474134030, session=1, seqNo=0) 2024-11-21T23:08:28.116327Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=11973970011474134030, session=1) 2024-11-21T23:08:28.492328Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:28.492442Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:28.510873Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:28.511145Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.547184Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:28.547998Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=15535701478695308425, session=0, seqNo=0) 2024-11-21T23:08:28.548147Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:28.568344Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=15535701478695308425, session=1) 2024-11-21T23:08:29.093684Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:29.093818Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:29.130625Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:29.130820Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:29.161599Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:29.162202Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:130:2156], cookie=15084914353770233505, path="") 2024-11-21T23:08:29.183508Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:130:2156], cookie=15084914353770233505, status=SUCCESS) 2024-11-21T23:08:29.184827Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:139:2163], cookie=17620397130751495315, session=0, seqNo=0) 2024-11-21T23:08:29.185017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:29.200739Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:139:2163], cookie=17620397130751495315, session=1) 2024-11-21T23:08:29.201734Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=111, session=0, seqNo=0) 2024-11-21T23:08:29.201893Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T23:08:29.202122Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:140:2164], cookie=222, seqNo=0 2024-11-21T23:08:29.217209Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], cookie=111, session=2) 2024-11-21T23:08:29.769999Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:29.770135Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:29.801723Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:29.801909Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:29.817047Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:29.817437Z node 5 
:KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:130:2156], cookie=18091137456007236290, path="") 2024-11-21T23:08:29.842121Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:130:2156], cookie=18091137456007236290, status=SUCCESS) 2024-11-21T23:08:29.843332Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:139:2163], cookie=9071186176860575883, session=0, seqNo=0) 2024-11-21T23:08:29.843519Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:29.862086Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:139:2163], cookie=9071186176860575883, session=1) 2024-11-21T23:08:29.863112Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:139:2163], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T23:08:29.863321Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T23:08:29.863441Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T23:08:29.863895Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2164], cookie=111, session=0, seqNo=0) 2024-11-21T23:08:29.863996Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T23:08:29.864115Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2164], cookie=222, session=1, seqNo=0) 2024-11-21T23:08:29.879398Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:139:2163], cookie=123) 2024-11-21T23:08:29.879516Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2164], cookie=111, session=2) 2024-11-21T23:08:29.879579Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2164], cookie=222, session=1) >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup-EvWrite >> TKesusTest::TestCreateSemaphore [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> DataShardTxOrder::ZigZag_oo >> DataShardTxOrder::ZigZag_oo8_dirty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2024-11-21T23:08:28.080846Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:28.080964Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:28.101258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:28.101386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.138654Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:28.139631Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=1079403922838170137, session=0, seqNo=222) 2024-11-21T23:08:28.139818Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:28.152066Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=1079403922838170137, session=1) 2024-11-21T23:08:28.152448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute 
(sender=[1:131:2157], cookie=5029043922908155165, session=1, seqNo=111) 2024-11-21T23:08:28.171094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=5029043922908155165, session=1) 2024-11-21T23:08:28.828339Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:28.828446Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:28.857257Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:28.857600Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.887120Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:28.887654Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=111, session=0, seqNo=42) 2024-11-21T23:08:28.887887Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:28.888088Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=222, session=1, seqNo=41) 2024-11-21T23:08:28.900400Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=111, session=1) 2024-11-21T23:08:28.900503Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=222, session=1) 2024-11-21T23:08:29.413712Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:29.413840Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:29.438130Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:29.438429Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:29.475086Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:29.475569Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=11375097307716524731, session=0, seqNo=0) 2024-11-21T23:08:29.475729Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:29.495348Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=11375097307716524731, session=1) 2024-11-21T23:08:29.496977Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:145:2169], cookie=13497542879017560444) 2024-11-21T23:08:29.497082Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:145:2169], cookie=13497542879017560444) 2024-11-21T23:08:29.974366Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:29.974489Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:29.993715Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:29.993891Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:30.027389Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:30.601416Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:30.601525Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:30.619524Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:30.619692Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:30.634274Z node 5 
:KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:30.634818Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=1666314224102950204, session=0, seqNo=0) 2024-11-21T23:08:30.634972Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:30.657939Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=1666314224102950204, session=1) 2024-11-21T23:08:30.658289Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T23:08:30.658502Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T23:08:30.658596Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T23:08:30.671305Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2024-11-21T23:08:30.672401Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:144:2168], cookie=14828344833176786773, name="Sem1", limit=42) 2024-11-21T23:08:30.672571Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2024-11-21T23:08:30.687414Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:144:2168], cookie=14828344833176786773) 2024-11-21T23:08:30.688060Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:149:2173], cookie=10689310472283727654, name="Sem1", limit=42) 2024-11-21T23:08:30.705086Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:149:2173], cookie=10689310472283727654) 2024-11-21T23:08:30.705711Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:154:2178], cookie=15673720663421070074, name="Sem1", limit=51) 2024-11-21T23:08:30.718280Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:154:2178], cookie=15673720663421070074) 2024-11-21T23:08:30.718942Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:159:2183], cookie=5788209066747010365, name="Lock1", limit=42) 2024-11-21T23:08:30.731348Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:159:2183], cookie=5788209066747010365) 2024-11-21T23:08:30.732019Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:164:2188], cookie=8591913802104754175, name="Lock1", limit=18446744073709551615) 2024-11-21T23:08:30.751394Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:164:2188], cookie=8591913802104754175) 2024-11-21T23:08:30.752136Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:169:2193], cookie=1995072779850046741, name="Sem1") 2024-11-21T23:08:30.752247Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:169:2193], cookie=1995072779850046741) 2024-11-21T23:08:30.753552Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:172:2196], cookie=15655844506731866084, name="Sem2") 2024-11-21T23:08:30.753645Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:172:2196], cookie=15655844506731866084) 
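The semaphore traffic in the entries above follows a counting-semaphore-with-FIFO-waiter-queue pattern: TTxSemaphoreCreate records a name and a limit ("Sem1", limit=42), TTxSemaphoreAcquire either grants units or leaves the session queued, and each grant is logged as "Processing semaphore ... queue: next order #N session S"; the ephemeral "Lock1" acquired with count=18446744073709551615 (uint64 max) reads as an exclusive-lock-style acquisition. The sketch below is a deliberately simplified, hypothetical model of that bookkeeping — it is not the Kesus tablet implementation, and every name in it is invented for illustration:

```cpp
// Toy model of a named counting semaphore with a FIFO waiter queue.
// Mirrors the "Created new semaphore" / "Processing semaphore ... queue:
// next order #N session S" lines in the log; not the actual Kesus code.
#include <algorithm>
#include <cstdint>
#include <deque>
#include <iostream>
#include <string>
#include <utility>

struct Waiter {
    uint64_t session;
    uint64_t count;  // units requested
};

class ToySemaphore {
public:
    ToySemaphore(std::string name, uint64_t limit)
        : Name(std::move(name)), Limit(limit) {}

    // Enqueue the request, then drain the queue; grants happen in FIFO order.
    void Acquire(uint64_t session, uint64_t count) {
        Queue.push_back({session, count});
        ProcessQueue();
    }

    // Return units and let queued sessions proceed.
    void Release(uint64_t count) {
        Used -= std::min(Used, count);
        ProcessQueue();
    }

private:
    void ProcessQueue() {
        while (!Queue.empty() && Used + Queue.front().count <= Limit) {
            const Waiter w = Queue.front();
            Queue.pop_front();
            Used += w.count;
            std::cout << "Processing semaphore \"" << Name
                      << "\" queue: next order #" << ++NextOrder
                      << " session " << w.session << "\n";
        }
    }

    std::string Name;
    uint64_t Limit = 0;
    uint64_t Used = 0;
    uint64_t NextOrder = 0;
    std::deque<Waiter> Queue;
};

int main() {
    ToySemaphore sem("Sem1", /*limit=*/42);
    sem.Acquire(/*session=*/1, /*count=*/1);   // granted: order #1 session 1
    sem.Acquire(/*session=*/2, /*count=*/42);  // queued: 1 + 42 would exceed the limit
    sem.Release(1);                            // now order #2 session 2 is granted
}
```

The real tablet additionally runs each step as a local transaction (the TTx*::Execute/Complete pairs visible in the log), which this sketch ignores.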
2024-11-21T23:08:30.785345Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:30.785463Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:30.795042Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:30.795592Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:30.863501Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:30.863687Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T23:08:30.864097Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:212:2226], cookie=8362211879981894948, name="Sem1") 2024-11-21T23:08:30.864194Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:212:2226], cookie=8362211879981894948) 2024-11-21T23:08:30.864856Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:219:2232], cookie=1455241735229662616, name="Sem2") 2024-11-21T23:08:30.864932Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:219:2232], cookie=1455241735229662616) >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup-EvWrite >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup-EvWrite >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardScan::ScanFollowedByUpdate [GOOD] >> DataShardTxOrder::DelayData >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2024-11-21T23:08:28.223881Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:28.224034Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:28.247025Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:28.247175Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.281875Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:28.282193Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute 
(sender=[1:130:2156], cookie=2765376913668682299, path="/foo/bar/baz") 2024-11-21T23:08:28.301780Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:130:2156], cookie=2765376913668682299, status=SUCCESS) 2024-11-21T23:08:28.302434Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:139:2163], cookie=15897832236327921765) 2024-11-21T23:08:28.323193Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:139:2163], cookie=15897832236327921765) 2024-11-21T23:08:28.323829Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:144:2168], cookie=4049516906613898288, path="/foo/bar/baz") 2024-11-21T23:08:28.341343Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:144:2168], cookie=4049516906613898288, status=SUCCESS) 2024-11-21T23:08:28.342051Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:149:2173], cookie=9375766714423826997) 2024-11-21T23:08:28.362206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:149:2173], cookie=9375766714423826997) 2024-11-21T23:08:28.376876Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:28.377007Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:28.377669Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:28.378294Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.433306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:28.433714Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:191:2205], cookie=2007082288753803376) 2024-11-21T23:08:28.461650Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:191:2205], cookie=2007082288753803376) 2024-11-21T23:08:28.462355Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:199:2212], cookie=4910830173678314034, path="/foo/bar/baz") 2024-11-21T23:08:28.475936Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:199:2212], cookie=4910830173678314034, status=SUCCESS) 2024-11-21T23:08:28.476639Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:204:2217], cookie=6015639728805375524, path="/foo/bar/baz") 2024-11-21T23:08:28.476730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:204:2217], cookie=6015639728805375524, status=PRECONDITION_FAILED) 2024-11-21T23:08:28.919144Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:28.919244Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:28.936413Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:28.936730Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.963116Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:28.963512Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:130:2156], cookie=10112752612715273162, name="Lock1") 2024-11-21T23:08:28.963614Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:130:2156], cookie=10112752612715273162) 2024-11-21T23:08:29.473188Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:29.473305Z 
node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:29.490053Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:29.490172Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:29.516027Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:29.516579Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=15930215338371392674, session=0, seqNo=0) 2024-11-21T23:08:29.516744Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:29.531500Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=15930215338371392674, session=1) 2024-11-21T23:08:29.531936Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:128:2154], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T23:08:29.532132Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T23:08:29.532227Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T23:08:29.551114Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:128:2154], cookie=111) 2024-11-21T23:08:29.551695Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:139:2163], cookie=10413625254024694357, name="Lock1", force=0) 2024-11-21T23:08:29.566687Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:139:2163], cookie=10413625254024694357) 2024-11-21T23:08:29.567345Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:144:2168], cookie=4508009937265243588, name="Sem1", force=0) 2024-11-21T23:08:29.587181Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:144:2168], cookie=4508009937265243588) 2024-11-21T23:08:29.587778Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:149:2173], cookie=8579156663171363680, name="Sem1", limit=42) 2024-11-21T23:08:29.587941Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2024-11-21T23:08:29.606625Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:149:2173], cookie=8579156663171363680) 2024-11-21T23:08:29.607157Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:154:2178], cookie=2319401690667078159, name="Sem1", force=0) 2024-11-21T23:08:29.607238Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2024-11-21T23:08:29.626145Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:154:2178], cookie=2319401690667078159) 2024-11-21T23:08:29.626761Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:159:2183], cookie=14542621146399717785, name="Sem1", force=0) 2024-11-21T23:08:29.640895Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:159:2183], cookie=14542621146399717785) 2024-11-21T23:08:30.208154Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:30.208270Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:30.231317Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
2024-11-21T23:08:30.231531Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:30.260600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:30.261194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=3849796168843192702, session=0, seqNo=0) 2024-11-21T23:08:30.261362Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:30.273442Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=3849796168843192702, session=1) 2024-11-21T23:08:30.273735Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=12242667883944814671, session=0, seqNo=0) 2024-11-21T23:08:30.273846Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T23:08:30.291315Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=12242667883944814671, session=2) 2024-11-21T23:08:30.291711Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:130:2156], cookie=16968039332562998034 2024-11-21T23:08:30.292266Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:142:2166], cookie=9040377939328051571, name="Sem1", limit=3) 2024-11-21T23:08:30.292430Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T23:08:30.307568Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:142:2166], cookie=9040377939328051571) 2024-11-21T23:08:30.307892Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=112, name="Sem1") 2024-11-21T23:08:30.307956Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=112) 2024-11-21T23:08:30.308118Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=113, name="Sem1") 2024-11-21T23:08:30.308167Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=113) 2024-11-21T23:08:30.308346Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:130:2156], cookie=5515205165147064076, session=2, seqNo=0) 2024-11-21T23:08:30.320364Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:130:2156], cookie=5515205165147064076, session=2) 2024-11-21T23:08:30.320671Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=114, name="Sem1") 2024-11-21T23:08:30.320750Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=114) 2024-11-21T23:08:30.320941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=115, name="Sem1") 2024-11-21T23:08:30.320999Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=115) 2024-11-21T23:08:30.321372Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:149:2173], cookie=11091503816881927589, name="Sem1") 2024-11-21T23:08:30.336560Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:149:2173], cookie=11091503816881927589) 2024-11-21T23:08:30.336895Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=116, session=1, semaphore="Sem1" count=1) 2024-11-21T23:08:30.337052Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T23:08:30.359099Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=116) 2024-11-21T23:08:30.359488Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=117, session=2, semaphore="Sem1" count=2) 2024-11-21T23:08:30.359669Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2024-11-21T23:08:30.383174Z node 4 :KESUS_TABLET DEBUG: [72057594037 ... de 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2024-11-21T23:08:31.275575Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:200:2218], cookie=17054890172846221007) 2024-11-21T23:08:31.276041Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=126, session=1, semaphore="Sem2" count=3) 2024-11-21T23:08:31.276206Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2024-11-21T23:08:31.288869Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=126) 2024-11-21T23:08:31.289542Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=127, name="Sem2") 2024-11-21T23:08:31.289646Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=127) 2024-11-21T23:08:31.289981Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=128, session=1, semaphore="Sem2" count=3) 2024-11-21T23:08:31.302434Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=128) 2024-11-21T23:08:31.556735Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.569369Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.580201Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=129, session=1, semaphore="Sem2" count=2) 2024-11-21T23:08:31.594616Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=129) 2024-11-21T23:08:31.595155Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=130, name="Sem2") 2024-11-21T23:08:31.595270Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=130) 2024-11-21T23:08:31.595595Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:130:2156], cookie=131, session=1, semaphore="Sem2" count=1) 2024-11-21T23:08:31.608827Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:130:2156], cookie=131) 2024-11-21T23:08:31.609365Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=132, name="Sem2") 2024-11-21T23:08:31.609471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=132) 2024-11-21T23:08:31.609839Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:130:2156], cookie=133, name="Sem2") 2024-11-21T23:08:31.609921Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:130:2156], cookie=133) 2024-11-21T23:08:32.162481Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:32.162603Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:32.186055Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:32.186262Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:32.203414Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:32.217809Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=18187822397236135903, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2024-11-21T23:08:32.218139Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2024-11-21T23:08:32.243639Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=18187822397236135903) 2024-11-21T23:08:32.244404Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=11463371941942558456, path="/Root1/Res", config={ }) 2024-11-21T23:08:32.244702Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2024-11-21T23:08:32.258602Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=11463371941942558456) 2024-11-21T23:08:32.259406Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:144:2168], cookie=15093041877109883601, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2024-11-21T23:08:32.259616Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2024-11-21T23:08:32.272156Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:144:2168], cookie=15093041877109883601) 2024-11-21T23:08:32.272800Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:149:2173], cookie=14410550927346393851, path="/Root2/Res", config={ }) 2024-11-21T23:08:32.273010Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2024-11-21T23:08:32.285271Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:149:2173], cookie=14410550927346393851) 2024-11-21T23:08:32.285941Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:154:2178], cookie=8850778512209068979, path="/Root2/Res/Subres", config={ }) 2024-11-21T23:08:32.286243Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2024-11-21T23:08:32.298443Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:154:2178], cookie=8850778512209068979) 2024-11-21T23:08:32.299774Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:159:2183]. Cookie: 9343182416429708990. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:32.299833Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:159:2183], cookie=9343182416429708990) 2024-11-21T23:08:32.344563Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:32.387771Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:32.426861Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:32.427744Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:166:2187]. Cookie: 12984825340846492014. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2024-11-21T23:08:32.428807Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:169:2190]. Cookie: 17199524298328042319. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:32.428882Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:169:2190], cookie=17199524298328042319) 2024-11-21T23:08:32.464425Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:32.507087Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:32.507992Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:174:2194]. Cookie: 15453534524729449235. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2024-11-21T23:08:32.509019Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:159:2183]. Cookie: 13539314881405358856. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:32.509084Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:159:2183], cookie=13539314881405358856) 2024-11-21T23:08:32.509907Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:169:2190]. Cookie: 18344528299076902522. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:32.509971Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:169:2190], cookie=18344528299076902522) 2024-11-21T23:08:32.552382Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:159:2183]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:32.552548Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:169:2190]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:32.553516Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:181:2201]. Cookie: 6936990052148004354. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2024-11-21T23:07:07.561149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873035116076214:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:07.561291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e13/r3tmp/tmpaS3ncN/pdisk_1.dat 2024-11-21T23:07:08.346467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:08.346641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:08.346920Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:08.365469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6044, node 1 2024-11-21T23:07:08.735582Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:08.735609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:08.735616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:08.735774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20994 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
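The TestGetQuoterResourceCounters output a few lines above shows allocation totals surfacing on every ancestor of the resource that consumed them: 300 (later 320) units allocated against "Root1/Res" are also reported for "Root1", and 200 (later 250) units allocated against "Root2/Res/Subres" appear on "Root2/Res" and "Root2" as well. A minimal, hypothetical sketch of that roll-up — not the actual quoter code, with invented names — could look like this:

```cpp
// Toy model of per-path quoter counters: an allocation on a leaf resource is
// also accounted to every ancestor prefix of its path, matching the
// GetQuoterResourceCounters output above. Not the actual Kesus quoter code.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

class ToyQuoterCounters {
public:
    // Add `amount` to the leaf path and to every ancestor prefix.
    void Allocate(const std::string& path, uint64_t amount) {
        std::string prefix;
        for (size_t i = 0; i <= path.size(); ++i) {
            if (i == path.size() || path[i] == '/') {
                Allocated[prefix] += amount;  // credit this prefix of the path
            }
            if (i < path.size()) {
                prefix += path[i];
            }
        }
    }

    void Print() const {
        for (const auto& [path, allocated] : Allocated) {
            std::cout << "ResourcePath: \"" << path
                      << "\" Allocated: " << allocated << "\n";
        }
    }

private:
    std::map<std::string, uint64_t> Allocated;
};

int main() {
    ToyQuoterCounters counters;
    counters.Allocate("Root1/Res", 300);         // three 100-unit allocations
    counters.Allocate("Root2/Res/Subres", 200);  // two 100-unit allocations
    counters.Allocate("Root1/Res", 20);
    counters.Allocate("Root2/Res/Subres", 50);
    // Prints Root1: 320, Root1/Res: 320, Root2: 250, Root2/Res: 250,
    // Root2/Res/Subres: 250 — the same per-path totals as in the log.
    counters.Print();
}
```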
2024-11-21T23:07:09.051932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.065793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:09.068902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.073977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:09.074218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:09.074233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:09.076929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:09.076957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 2024-11-21T23:07:09.080331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:09.085444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230429132, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:09.085490Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:09.085797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:09.087972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:09.088145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:09.088209Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:09.088319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:09.088376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:09.088425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:07:09.094125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:09.094178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:09.094198Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:09.094273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 
2024-11-21T23:07:09.094510Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:09.249762Z node 1 :TICKET_PARSER DEBUG: Ticket 2154A2A137A599BC3C2B98A8C8A23630AFF5662F (ipv6:[::1]:59636) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2024-11-21T23:07:09.395434Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:59652) has now valid token of root@builtin 2024-11-21T23:07:09.551341Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T23:07:13.514996Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873062561508279:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:13.515116Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e13/r3tmp/tmpSGmpyj/pdisk_1.dat 2024-11-21T23:07:13.719345Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:13.746718Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:13.746806Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:13.757333Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25323, node 4 2024-11-21T23:07:13.918468Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:13.918492Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:13.918503Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:13.918598Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T23:07:14.172959Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:14.173223Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:14.173242Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:14.174987Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:14.175147Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:14.175160Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T23:07:14.176712Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:14.176728Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:07:14.177890Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:14.178854Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:07:14.180745Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230434228, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:14.180783Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:07:14.180990Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:07:14.182644Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:14.182786Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:14.182817Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:07:14.182883Z node 4 :FLAT_TX_SCHEMES ... EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:08:04.328236Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:04.328826Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:04.328857Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:04.336800Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: root@builtin, status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:08:04.337094Z node 25 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:08:04.337115Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2024-11-21T23:08:04.347402Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:04.347442Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:08:04.349343Z node 25 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:08:04.351667Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:04.367028Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230484404, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:04.367077Z node 25 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:08:04.367404Z node 25 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:08:04.379403Z node 25 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:04.379648Z node 25 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:04.379723Z node 25 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:08:04.379883Z node 25 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:08:04.379951Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:08:04.380013Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:08:04.382068Z node 25 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:08:04.382130Z node 25 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:08:04.382158Z node 25 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2024-11-21T23:08:04.382306Z node 25 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T23:08:07.423198Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7439873273306892645:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:07.423302Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:14.718775Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:51492) has now valid token of root@builtin 2024-11-21T23:08:14.889296Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2024-11-21T23:08:18.061633Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7439873335447392728:2189];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001e13/r3tmp/tmpDPO3Eq/pdisk_1.dat 2024-11-21T23:08:18.293537Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:08:18.520530Z node 28 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:18.625928Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:18.626064Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:18.642942Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12050, node 28 2024-11-21T23:08:19.131298Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:08:19.131325Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:08:19.131336Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:08:19.131496Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
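[editor's note] The TClient::Ls response above is the schemeshard's describe output for the root path (PathVersion, ACL/EffectiveACL, DomainDescription). Outside the test harness, a similar description can be fetched through the public C++ SDK's scheme client; the sketch below is illustrative only — the endpoint, database, and exact result accessors are assumptions, not part of this test run.

// Hedged sketch: fetch a path description similar to the TClient::Ls output
// above, using the public C++ SDK scheme client. Endpoint/database values and
// the precise set of entry accessors are assumptions for illustration.
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

#include <iostream>

int main() {
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("grpc://localhost:2135")  // assumed endpoint
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);

    NYdb::NScheme::TSchemeClient scheme(driver);

    // DescribePath returns the entry the schemeshard publishes for "/Root"
    // (name, owner, type), i.e. the information dumped by TClient::Ls above.
    auto result = scheme.DescribePath("/Root").GetValueSync();
    if (result.IsSuccess()) {
        const auto& entry = result.GetEntry();
        std::cout << "Name: " << entry.Name
                  << " Owner: " << entry.Owner << std::endl;
    } else {
        std::cerr << result.GetIssues().ToString() << std::endl;
    }

    driver.Stop(true);
    return 0;
}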
2024-11-21T23:08:20.038146Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:20.038734Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:20.038768Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:20.042317Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:08:20.042624Z node 28 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:08:20.042646Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:08:20.051450Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:20.051497Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:08:20.056232Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:20.060905Z node 28 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:08:20.080058Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230500112, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:20.080139Z node 28 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:08:20.080536Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:08:20.091622Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:20.091872Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:20.091947Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:08:20.092117Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:08:20.092176Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:08:20.092251Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:08:20.096041Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:08:20.096116Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:08:20.096145Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 
2024-11-21T23:08:20.096271Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:08:22.994752Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7439873335447392728:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:22.994869Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:30.506808Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:33934) has now valid token of root@builtin 2024-11-21T23:08:30.603616Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> DataShardTxOrder::RandomPointsAndRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2024-11-21T23:07:54.889171Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.889198Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.889220Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:54.889683Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:54.910659Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:54.910854Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.911223Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:54.911675Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.911779Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:54.911852Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:54.911895Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:07:54.912656Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.912681Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.912700Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:54.913018Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:54.922727Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:54.923035Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.923375Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T23:07:54.923825Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.923969Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:54.924059Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:54.924102Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:07:54.925212Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.925235Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.925253Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:54.927734Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:54.937325Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:54.937541Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.937936Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:54.938786Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.940945Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:54.941068Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:54.941124Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:07:54.942334Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.942356Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.942439Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:54.942848Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:54.952176Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:54.952359Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.952720Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:54.954217Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.954969Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:54.961153Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:54.961222Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2024-11-21T23:07:54.962349Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.962368Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.962386Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:54.966258Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:54.978664Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:54.978860Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.979168Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:54.979577Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.979678Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:54.979743Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:54.979778Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:07:54.980515Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.980535Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.986452Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:54.989673Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:54.990480Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:54.990604Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.990915Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:54.991319Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.992176Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:54.992276Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:54.992313Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:07:54.993263Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.993283Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:54.993304Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:55.001179Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:07:55.004480Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:55.017935Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:55.018620Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:55.019385Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:55.019557Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:55.019642Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:55.019679Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:07:55.020744Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:55.020767Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:55.020801Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:55.021716Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:07:55.038689Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:07:55.038874Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:55.039205Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:07:55.040774Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:55.041147Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:07:55.041228Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:07:55.041266Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:07:55.062488Z :ReadSession INFO: Random seed for debugging is 1732230475062448 2024-11-21T23:07:55.778129Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873239428734344:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:55.778177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:56.355318Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:56.369255Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:56.379979Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873239999052386:2265];send_ ... ode 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T23:08:19.715090Z node 2 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T23:08:19.722594Z :DEBUG: [/Root] Decompression task done. 
Partition/PartitionSessionId: 0 (2-2) 2024-11-21T23:08:19.722763Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T23:08:19.679000Z WriteTime: 2024-11-21T23:08:19.690000Z Ip: "ipv6:[::1]:43742" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:43742" } } } 2024-11-21T23:08:19.723059Z :DEBUG: [/Root] [/Root] [73cc6191-59b2d1c6-c539cacb-4edaecc2] [dc1] Commit offsets [2, 3). Partition stream id: 1 2024-11-21T23:08:19.723376Z :DEBUG: [/Root] [/Root] [73cc6191-59b2d1c6-c539cacb-4edaecc2] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:08:19.727666Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T23:08:19.727938Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 got read request: guid# b032631e-fd746b70-7ac47554-88dcf08e 2024-11-21T23:08:19.730538Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:08:19.730602Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:08:19.730736Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_8978373981820467403_v1 2024-11-21T23:08:19.730882Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T23:08:19.728305Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 grpc read done: success# 1, data# { commit { cookies { assign_id: 1 partition_cookie: 3 } } } 2024-11-21T23:08:19.728528Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 commit request from client for 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T23:08:19.728560Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T23:08:19.728610Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2024-11-21T23:08:19.738766Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T23:08:19.738822Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2024-11-21T23:08:19.738881Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2024-11-21T23:08:19.739838Z :DEBUG: [/Root] [/Root] [73cc6191-59b2d1c6-c539cacb-4edaecc2] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2024-11-21T23:08:19.738154Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:08:19.738235Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:08:19.738306Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T23:08:19.782572Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|85ecfb31-52d1e3a5-63dbec35-10d4be25_0] Write session will now close 2024-11-21T23:08:19.782652Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|85ecfb31-52d1e3a5-63dbec35-10d4be25_0] Write session: aborting 2024-11-21T23:08:19.783286Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|85ecfb31-52d1e3a5-63dbec35-10d4be25_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:08:19.783333Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|85ecfb31-52d1e3a5-63dbec35-10d4be25_0] Write session: destroy 2024-11-21T23:08:19.794547Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|85ecfb31-52d1e3a5-63dbec35-10d4be25_0 grpc read done: success: 0 data: 2024-11-21T23:08:19.794585Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 
test-message-group-id|85ecfb31-52d1e3a5-63dbec35-10d4be25_0 grpc read failed 2024-11-21T23:08:19.795062Z node 1 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 4 sessionId: test-message-group-id|85ecfb31-52d1e3a5-63dbec35-10d4be25_0 2024-11-21T23:08:19.795094Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|85ecfb31-52d1e3a5-63dbec35-10d4be25_0 is DEAD 2024-11-21T23:08:19.798187Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:08:19.798246Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439873342507952574:2610] destroyed 2024-11-21T23:08:19.798298Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:08:19.795444Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:08:21.661069Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:08:22.294583Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T23:08:26.659283Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:08:29.710140Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T23:08:29.812921Z :INFO: [/Root] [/Root] [73cc6191-59b2d1c6-c539cacb-4edaecc2] Closing read session. Close timeout: 0.000000s 2024-11-21T23:08:29.813010Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T23:08:29.813058Z :INFO: [/Root] [/Root] [73cc6191-59b2d1c6-c539cacb-4edaecc2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16659 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:08:29.813200Z :NOTICE: [/Root] [/Root] [73cc6191-59b2d1c6-c539cacb-4edaecc2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:08:29.813254Z :DEBUG: [/Root] [/Root] [73cc6191-59b2d1c6-c539cacb-4edaecc2] [dc1] Abort session to cluster 2024-11-21T23:08:29.813944Z :NOTICE: [/Root] [/Root] [73cc6191-59b2d1c6-c539cacb-4edaecc2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:08:29.863669Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 grpc read done: success# 0, data# { } 2024-11-21T23:08:29.863713Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 grpc read failed 2024-11-21T23:08:29.863748Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 grpc closed 2024-11-21T23:08:29.863806Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_8978373981820467403_v1 is DEAD 2024-11-21T23:08:29.878787Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439873316738148251:2521] disconnected; active server actors: 1 2024-11-21T23:08:29.878851Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439873316738148251:2521] client user disconnected session shared/user_1_1_8978373981820467403_v1 2024-11-21T23:08:29.881032Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:08:29.881074Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_8978373981820467403_v1 2024-11-21T23:08:29.881106Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439873316738148262:2525] destroyed 2024-11-21T23:08:29.881174Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_8978373981820467403_v1 2024-11-21T23:08:31.297887Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:31.297937Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:31.297974Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:08:31.298418Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:08:31.298889Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:08:31.299465Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:31.300697Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:08:31.301476Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:08:31.301957Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:08:31.302185Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T23:08:31.302299Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:08:31.302351Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:08:31.302388Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T23:08:31.303004Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:08:31.303057Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> DataShardOutOfOrder::TestReadTableWriteConflict >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] Test command err: 2024-11-21T23:08:09.398310Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T23:08:09.402242Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T23:08:09.402555Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T23:08:09.403244Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T23:08:09.404552Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T23:08:09.404612Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T23:08:09.405520Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:25:2072] ControllerId# 72057594037932033 2024-11-21T23:08:09.405570Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T23:08:09.405725Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T23:08:09.406050Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T23:08:09.417600Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:08:09.417671Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:08:09.420451Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:33:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.420684Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:34:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.420994Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:35:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.421168Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.421348Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.421487Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:38:2082] targetNodeId# 1 
Marker# DSP01 2024-11-21T23:08:09.421622Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:24:2071] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.421650Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:08:09.421730Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:25:2072] 2024-11-21T23:08:09.421773Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:25:2072] 2024-11-21T23:08:09.421822Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T23:08:09.421914Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T23:08:09.422656Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T23:08:09.462710Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T23:08:09.462794Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T23:08:09.462836Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T23:08:09.464541Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:09.464937Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T23:08:09.464977Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T23:08:09.465010Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2024-11-21T23:08:09.469315Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2024-11-21T23:08:09.469883Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2024-11-21T23:08:09.470090Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T23:08:09.470247Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:29:2063] 2024-11-21T23:08:09.470283Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:29:2063] 2024-11-21T23:08:09.470777Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2024-11-21T23:08:09.470840Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2024-11-21T23:08:09.470928Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2024-11-21T23:08:09.470976Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T23:08:09.471120Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:09.471189Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T23:08:09.471382Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2024-11-21T23:08:09.471465Z node 1 :PIPE_CLIENT DEBUG: 
TClient[72057594037927937] ::Bootstrap [1:50:2090] 2024-11-21T23:08:09.471509Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2090] 2024-11-21T23:08:09.471566Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T23:08:09.471772Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T23:08:09.471947Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:09.472029Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2024-11-21T23:08:09.483594Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2090] 2024-11-21T23:08:09.483762Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2024-11-21T23:08:09.483906Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T23:08:09.483961Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2024-11-21T23:08:09.484766Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:25:2072] 2024-11-21T23:08:09.485148Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:29:2063] 2024-11-21T23:08:09.485207Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:25:2072] 2024-11-21T23:08:09.485269Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:25:2072] 2024-11-21T23:08:09.485593Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T23:08:09.485767Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2024-11-21T23:08:09.485825Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2024-11-21T23:08:09.485859Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2024-11-21T23:08:09.485887Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T23:08:09.486067Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2024-11-21T23:08:09.486096Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2024-11-21T23:08:09.486119Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2024-11-21T23:08:09.486151Z node 1 :STATESTORAGE 
DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T23:08:09.486226Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T23:08:09.486313Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2024-11-21T23:08:09.486351Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T23:08:09.486425Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2024-11-21T23:08:09.486466Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2024-11-21T23:08:09.486539Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:29:2063] 2024-11-21T23:08:09.486585Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:29:2063] 2024-11-21T23:08:09.486715Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037927937} 2024-11-21T23:08:09.487217Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2024-11-21T23:08:09.487254Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T23:08:09.487405Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:321} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2024-11-21T23:08:09.487547Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {Ev ... 
TabletId# 72075186224037888 RecordGeneration# 3 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2024-11-21T23:08:34.572187Z node 20 :BS_PROXY_COLLECT DEBUG: [27260cd07680ee4f] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 3 Channel# 1 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2024-11-21T23:08:34.572247Z node 20 :BS_PROXY_COLLECT INFO: [27260cd07680ee4f] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 3 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2024-11-21T23:08:34.572997Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [20:678:2477] 2024-11-21T23:08:34.573071Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [20:678:2477] 2024-11-21T23:08:34.573389Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:34.573479Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [21:546:2091] 2024-11-21T23:08:34.573800Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 21 [20:678:2477] 2024-11-21T23:08:34.574020Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [20:678:2477] 2024-11-21T23:08:34.574092Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [20:678:2477] 2024-11-21T23:08:34.574164Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] ::Bootstrap [21:682:2142] 2024-11-21T23:08:34.574193Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] lookup [21:682:2142] 2024-11-21T23:08:34.574265Z node 21 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594046678944 entry.State: StNormal ev: {EvForward TabletID: 72057594046678944 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:34.574306Z node 21 :TABLET_RESOLVER DEBUG: SelectForward node 21 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [20:317:2260] 2024-11-21T23:08:34.574352Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 20 [21:682:2142] 2024-11-21T23:08:34.574539Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [21:682:2142] 2024-11-21T23:08:34.574579Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [21:682:2142] 2024-11-21T23:08:34.574809Z node 20 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [21:682:2142] 2024-11-21T23:08:34.575058Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [20:678:2477] 2024-11-21T23:08:34.575123Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] immediate retry [20:678:2477] 2024-11-21T23:08:34.575175Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [20:678:2477] 2024-11-21T23:08:34.575273Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2024-11-21T23:08:34.575429Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:34.575552Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [21:682:2142] 2024-11-21T23:08:34.575617Z 
node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send queued [21:682:2142] 2024-11-21T23:08:34.575712Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T23:08:34.575860Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T23:08:34.575952Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T23:08:34.575990Z node 20 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2024-11-21T23:08:34.576062Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T23:08:34.576166Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [21:682:2142] 2024-11-21T23:08:34.576222Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [21:682:2142] 2024-11-21T23:08:34.576320Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [21:682:2142] 2024-11-21T23:08:34.576399Z node 20 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0} 2024-11-21T23:08:34.576517Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [20:639:2448] CurrentLeaderTablet: [20:641:2449] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T23:08:34.576582Z node 20 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2024-11-21T23:08:34.576660Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [20:639:2448] 2024-11-21T23:08:34.576779Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [20:678:2477] 2024-11-21T23:08:34.576878Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [20:678:2477] 2024-11-21T23:08:34.577027Z node 20 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [20:678:2477] 2024-11-21T23:08:34.577146Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [20:678:2477] 2024-11-21T23:08:34.577183Z node 20 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [20:678:2477] 2024-11-21T23:08:34.577340Z node 20 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [21:681:2142] EventType# 271122945 2024-11-21T23:08:34.577545Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2024-11-21T23:08:34.577657Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T23:08:34.577967Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ 
}, 0 gb} 2024-11-21T23:08:34.578073Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{17, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T23:08:34.579545Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [21:688:2143] 2024-11-21T23:08:34.579589Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [21:688:2143] 2024-11-21T23:08:34.579860Z node 21 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:34.579915Z node 21 :TABLET_RESOLVER DEBUG: SelectForward node 21 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [20:316:2259] 2024-11-21T23:08:34.580113Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [21:688:2143] 2024-11-21T23:08:34.580277Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 20 [21:688:2143] 2024-11-21T23:08:34.580756Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [21:688:2143] 2024-11-21T23:08:34.580803Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [21:688:2143] 2024-11-21T23:08:34.581316Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [21:688:2143] 2024-11-21T23:08:34.583091Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [21:688:2143] 2024-11-21T23:08:34.583136Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [21:688:2143] 2024-11-21T23:08:34.583164Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [21:688:2143] 2024-11-21T23:08:34.583268Z node 21 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [21:688:2143] 2024-11-21T23:08:34.583589Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [21:686:2143] EventType# 268959744 2024-11-21T23:08:34.583792Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{43, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2024-11-21T23:08:34.583873Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{43, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T23:08:34.584063Z node 20 :HIVE WARN: HIVE#72057594037927937 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:34.584174Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{43, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{24, redo 152b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2024-11-21T23:08:34.584259Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{43, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T23:08:34.584816Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [20:697:2482] 2024-11-21T23:08:34.584874Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [20:697:2482] 2024-11-21T23:08:34.584980Z node 20 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:34.585086Z node 20 :TABLET_RESOLVER DEBUG: SelectForward node 20 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [20:316:2259] 
2024-11-21T23:08:34.585163Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [20:697:2482] 2024-11-21T23:08:34.585233Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [20:697:2482] 2024-11-21T23:08:34.585300Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [20:697:2482] 2024-11-21T23:08:34.585371Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [20:697:2482] 2024-11-21T23:08:34.585535Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [20:697:2482] 2024-11-21T23:08:34.585723Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [20:697:2482] 2024-11-21T23:08:34.585789Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [20:697:2482] 2024-11-21T23:08:34.585840Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [20:697:2482] 2024-11-21T23:08:34.585912Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [20:697:2482] 2024-11-21T23:08:34.585962Z node 20 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [20:697:2482] 2024-11-21T23:08:34.586047Z node 20 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [20:696:2481] EventType# 268697616 >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate+StreamLookup [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2024-11-21T23:08:29.093820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:29.093888Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:29.093985Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:29.109827Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:29.110803Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T23:08:29.111119Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:29.123497Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:29.180778Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:29.181504Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:29.186101Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T23:08:29.186245Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T23:08:29.186326Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T23:08:29.186772Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:29.237184Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 
2024-11-21T23:08:29.237434Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:29.237637Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T23:08:29.237691Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T23:08:29.237736Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T23:08:29.237773Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:29.238199Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:29.238265Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:29.243302Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T23:08:29.243479Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T23:08:29.243836Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:29.243887Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:29.243932Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T23:08:29.243976Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:29.244062Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:29.244109Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T23:08:29.244160Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:29.374548Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:29.374641Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:29.374710Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T23:08:29.379762Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T23:08:29.379890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:29.380090Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T23:08:29.380417Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T23:08:29.380576Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T23:08:29.380707Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T23:08:29.380847Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 
2024-11-21T23:08:29.380927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T23:08:29.381016Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T23:08:29.381092Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:29.381908Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:29.382001Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T23:08:29.382086Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T23:08:29.382648Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:29.382775Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T23:08:29.382820Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T23:08:29.382869Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T23:08:29.382923Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:29.382981Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:29.414654Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T23:08:29.414745Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:29.414811Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:29.414864Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T23:08:29.414951Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:29.415675Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:29.415737Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:29.415786Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T23:08:29.415958Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2024-11-21T23:08:29.415998Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:29.416157Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:29.416207Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T23:08:29.416244Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2024-11-21T23:08:29.416300Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2024-11-21T23:08:29.420446Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2024-11-21T23:08:29.420538Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:29.420824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:29.420872Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:29.420925Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:29.420968Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:29.421028Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:29.421074Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2024-11-21T23:08:29.421112Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2024-11-21T23:08:29.421163Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T23:08:29.421200Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:29.421241Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:29.421275Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:29.421491Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2024-11-21T23:08:29.421548Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T23:08:29.421588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:29.421614Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T23:08:29.421637Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T23:08:29.421708Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:29.421738Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T23:08:29.421774Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:29.421839Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:29.421906Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2024-11-21T23:08:29.421941Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2024-11-21T23:08:29.421984Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2024-11-21T23:08:29.422033Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2024-11-21T23:08:29.422060Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:29.422105Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit MakeSnapshot 2024-11-21T23:08:29.422135Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit MakeSnapshot 2024-11-21T23:08:29.422184Z node 1 :TX_DATASHARD ... 
7186 2024-11-21T23:08:35.617245Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2024-11-21T23:08:35.617275Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:08:35.617298Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:35.617594Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2024-11-21T23:08:35.617641Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.617677Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2024-11-21T23:08:35.617842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2024-11-21T23:08:35.617870Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.617904Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2024-11-21T23:08:35.618005Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2024-11-21T23:08:35.618057Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.618086Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2024-11-21T23:08:35.618167Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T23:08:35.618194Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.618213Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T23:08:35.618287Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2024-11-21T23:08:35.618312Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.618333Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2024-11-21T23:08:35.618430Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T23:08:35.618464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.618487Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T23:08:35.618564Z node 1 :TX_DATASHARD TRACE: StateWork, received 
event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T23:08:35.618590Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.618612Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T23:08:35.618667Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T23:08:35.618687Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.618718Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2024-11-21T23:08:35.618802Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T23:08:35.618825Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.618846Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T23:08:35.618913Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T23:08:35.618933Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.618952Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T23:08:35.619026Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T23:08:35.619060Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.619081Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T23:08:35.619137Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T23:08:35.619169Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.619268Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T23:08:35.619348Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:432:2382], Recipient [1:227:2222]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T23:08:35.619378Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.619398Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T23:08:35.619491Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:35.619522Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437184 on unit CompleteOperation 2024-11-21T23:08:35.619568Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:35.619630Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T23:08:35.619731Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:35.619879Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:35.619903Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:151] at 9437184 on unit CompleteOperation 2024-11-21T23:08:35.619936Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 151] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:35.619975Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T23:08:35.619998Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:35.620082Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:35.620109Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437184 on unit CompleteOperation 2024-11-21T23:08:35.620137Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:35.620170Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T23:08:35.620211Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:35.620311Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:35.620351Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:154] at 9437184 on unit CompleteOperation 2024-11-21T23:08:35.620391Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:35.620433Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T23:08:35.620458Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:35.620635Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T23:08:35.620668Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.620694Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T23:08:35.620781Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 151 
TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T23:08:35.620823Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.620854Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T23:08:35.620950Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T23:08:35.620985Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.621013Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T23:08:35.621094Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:328:2301]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T23:08:35.621119Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:35.621144Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> TTableProfileTests::WrongTableProfile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2024-11-21T23:08:27.974710Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:27.974856Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:28.006778Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:28.006916Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.047263Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:28.069350Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=2320547314539040782, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T23:08:28.069803Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T23:08:28.087261Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=2320547314539040782) 2024-11-21T23:08:28.087934Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=13612778382239007909, path="/Root/Res", config={ }) 2024-11-21T23:08:28.088165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2024-11-21T23:08:28.105756Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=13612778382239007909) 2024-11-21T23:08:28.107537Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:145:2169]. Cookie: 7737961739024921334. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:28.107624Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:145:2169], cookie=7737961739024921334) 2024-11-21T23:08:28.108161Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:145:2169]. Cookie: 18135614002174276952. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2024-11-21T23:08:28.108214Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:145:2169], cookie=18135614002174276952) 2024-11-21T23:08:30.462795Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:30.462902Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:30.486439Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:30.486743Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:30.508319Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:30.508891Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:130:2156], cookie=11253878078547576906, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T23:08:30.509198Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T23:08:30.531659Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:130:2156], cookie=11253878078547576906) 2024-11-21T23:08:30.532581Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:140:2164]. Cookie: 13780976040810417591. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:30.532636Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:140:2164], cookie=13780976040810417591) 2024-11-21T23:08:30.533160Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:140:2164]. Cookie: 16141425870352304903. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:30.533204Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:140:2164], cookie=16141425870352304903) 2024-11-21T23:08:30.533695Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:140:2164]. Cookie: 3741062586021268320. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2024-11-21T23:08:30.533747Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:140:2164], cookie=3741062586021268320) 2024-11-21T23:08:30.534144Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:140:2164]. Cookie: 17821772058708983013. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2024-11-21T23:08:30.534190Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:140:2164], cookie=17821772058708983013) 2024-11-21T23:08:32.799317Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:32.799444Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:32.819672Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:32.819960Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:32.851847Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:32.852213Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:128:2154], cookie=10386759310935485740, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T23:08:32.852454Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T23:08:32.869124Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:128:2154], cookie=10386759310935485740) 2024-11-21T23:08:32.869799Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:138:2162], cookie=17582084303071118950, path="/Root/Res1", config={ }) 2024-11-21T23:08:32.870014Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2024-11-21T23:08:32.889662Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:138:2162], cookie=17582084303071118950) 2024-11-21T23:08:32.890260Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:143:2167], cookie=10649024353736070703, path="/Root/Res2", config={ }) 2024-11-21T23:08:32.890490Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2024-11-21T23:08:32.902352Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:143:2167], cookie=10649024353736070703) 2024-11-21T23:08:32.903268Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:148:2172]. Cookie: 16012837835198591917. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:32.903327Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:148:2172], cookie=16012837835198591917) 2024-11-21T23:08:32.903901Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:148:2172]. Cookie: 4428768199662911658. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:32.903954Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:148:2172], cookie=4428768199662911658) 2024-11-21T23:08:32.904409Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:148:2172]. Cookie: 15120496245810957303. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1019500 } } 2024-11-21T23:08:32.904472Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:148:2172], cookie=15120496245810957303) 2024-11-21T23:08:35.450217Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:35.450346Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:35.471120Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:35.471293Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:35.502216Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:35.502748Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=13114833272902543950, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2024-11-21T23:08:35.503088Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T23:08:35.516257Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=13114833272902543950) 2024-11-21T23:08:35.517148Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:140:2164]. Cookie: 794819913427906093. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:35.517208Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:140:2164], cookie=794819913427906093) 2024-11-21T23:08:35.517691Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 13698999288221351983. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2024-11-21T23:08:35.517740Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=13698999288221351983) 2024-11-21T23:08:35.518127Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:140:2164]. Cookie: 13552684080305284431. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2024-11-21T23:08:35.518170Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:140:2164], cookie=13552684080305284431) 2024-11-21T23:08:37.755826Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:37.755963Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:37.784852Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:37.785036Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:37.818489Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:37.818968Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:130:2156], cookie=8910722851488042656, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2024-11-21T23:08:37.819195Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T23:08:37.852721Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:130:2156], cookie=8910722851488042656) 2024-11-21T23:08:37.853442Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:139:2163], cookie=5304211822980442881, path="/Root/Res", config={ }) 2024-11-21T23:08:37.853728Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2024-11-21T23:08:37.867773Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:139:2163], cookie=5304211822980442881) 2024-11-21T23:08:37.868773Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:144:2168]. Cookie: 18068239725009035478. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:37.868844Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:144:2168], cookie=18068239725009035478) 2024-11-21T23:08:37.869364Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:148:2172], cookie=11800836440200085967, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2024-11-21T23:08:37.869542Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2024-11-21T23:08:37.869695Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:144:2168]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:37.887543Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:148:2172], cookie=11800836440200085967) 2024-11-21T23:08:37.888208Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:144:2168]. Cookie: 17684955075063602565. Data: { } 2024-11-21T23:08:37.888271Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:144:2168], cookie=17684955075063602565) ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWrite [FAIL] Test command err: Trying to start YDB, gRPC: 12186, MsgBus: 21800 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001146/r3tmp/tmphdnYfK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12186, node 1 TClient is connected to server localhost:21800 TClient is connected to server localhost:21800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... assertion failed at ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpDataIntegrityTrails::TTestCaseUpsertEvWrite::Execute_(NUnitTest::TTestContext &): (CountSubstr(ss.Str(), "DATA_INTEGRITY INFO: Component: Executer") == 2) failed: (0 != 2) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x17EDB68F 1. /-S/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:76: Execute_ @ 0x176464BF 2. /-S/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:19: operator() @ 0x17661D07 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:19:1) &> @ 0x17661D07 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:19:1) &> @ 0x17661D07 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17661D07 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17661D07 7. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x17F1A588 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x17F1A588 9. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x17F1A588 10. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x17EE21F8 11. /-S/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp:19: Execute @ 0x17660AB3 12. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x17EE3AC5 13. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x17F141CC 14. ??:0: ?? @ 0x7FE58EF40D8F 15. ??:0: ?? @ 0x7FE58EF40E3F 16. ??:0: ?? @ 0x1555F028 >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+StreamLookup-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite >> TRtmrTest::CreateWithoutTimeCastBuckets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2024-11-21T23:08:29.926696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:29.926779Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:29.926883Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:29.939752Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:29.940271Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T23:08:29.940572Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:29.951028Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:29.999600Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:30.000187Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:30.001633Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T23:08:30.001717Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T23:08:30.001815Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T23:08:30.002123Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:30.043544Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T23:08:30.043788Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:30.043971Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T23:08:30.044029Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T23:08:30.044071Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T23:08:30.044126Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:30.044504Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:30.044576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:30.044707Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T23:08:30.044812Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T23:08:30.045063Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:30.045108Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:30.045146Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T23:08:30.045176Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:30.045206Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:30.045236Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T23:08:30.045321Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:30.109113Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:30.109193Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:30.109256Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T23:08:30.119016Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T23:08:30.119112Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:30.119234Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T23:08:30.119439Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T23:08:30.119501Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T23:08:30.119576Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T23:08:30.119654Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:30.119698Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T23:08:30.119739Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T23:08:30.119779Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:30.120201Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:30.120251Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T23:08:30.120298Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T23:08:30.120339Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:30.120399Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T23:08:30.120433Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T23:08:30.120468Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T23:08:30.120530Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:30.120578Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:30.147623Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T23:08:30.147711Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:30.147759Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:30.147808Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T23:08:30.147897Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:30.148519Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:30.148574Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:30.148617Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T23:08:30.148771Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T23:08:30.148811Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:30.148974Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:30.149012Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:30.149047Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T23:08:30.149081Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T23:08:30.152985Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T23:08:30.153062Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:30.153313Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:30.153348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:30.153409Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:30.153461Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:30.153493Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 
2024-11-21T23:08:30.153531Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T23:08:30.153566Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T23:08:30.153612Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:30.153647Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:30.153677Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:30.153706Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:30.153894Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T23:08:30.153927Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:30.153948Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:30.153975Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T23:08:30.154001Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T23:08:30.154067Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:30.154088Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T23:08:30.154131Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:30.154162Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:30.154224Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T23:08:30.154256Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T23:08:30.154289Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T23:08:30.154343Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:30.154387Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:30.154442Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
1000005:149] at 9437186 on unit CompleteOperation 2024-11-21T23:08:37.644618Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:08:37.644648Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:37.644673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:37.645028Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2024-11-21T23:08:37.645072Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.645116Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2024-11-21T23:08:37.645275Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2024-11-21T23:08:37.645319Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.645349Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2024-11-21T23:08:37.645431Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2304]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T23:08:37.645458Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.645482Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T23:08:37.645551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:227:2222], Recipient [1:436:2386]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T23:08:37.645590Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T23:08:37.645640Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2024-11-21T23:08:37.645707Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2024-11-21T23:08:37.645753Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2024-11-21T23:08:37.645827Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T23:08:37.645973Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T23:08:37.646003Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.646037Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T23:08:37.646108Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2304]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T23:08:37.646141Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.646185Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T23:08:37.646288Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:436:2386], Recipient [1:436:2386]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:37.646324Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:37.646373Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2024-11-21T23:08:37.648674Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:08:37.648749Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2024-11-21T23:08:37.648791Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2024-11-21T23:08:37.648858Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T23:08:37.648944Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2024-11-21T23:08:37.649016Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2024-11-21T23:08:37.649057Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2024-11-21T23:08:37.649715Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2024-11-21T23:08:37.649794Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T23:08:37.649855Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2024-11-21T23:08:37.649888Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2024-11-21T23:08:37.649924Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2024-11-21T23:08:37.649956Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T23:08:37.650245Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2024-11-21T23:08:37.650290Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2024-11-21T23:08:37.650325Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2024-11-21T23:08:37.650358Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2024-11-21T23:08:37.650459Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2024-11-21T23:08:37.650516Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2024-11-21T23:08:37.650564Z 
node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2024-11-21T23:08:37.650604Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:37.650635Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2024-11-21T23:08:37.650669Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2024-11-21T23:08:37.650702Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2024-11-21T23:08:37.651170Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T23:08:37.651224Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.651264Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T23:08:37.651355Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2304]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T23:08:37.651387Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.651420Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2024-11-21T23:08:37.651574Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T23:08:37.651615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.651648Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T23:08:37.651785Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T23:08:37.651835Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.651869Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T23:08:37.651984Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T23:08:37.652028Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.652055Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T23:08:37.652144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T23:08:37.652172Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.652199Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T23:08:37.668071Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:37.668141Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T23:08:37.668236Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 1 ms, propose latency: 3 ms 2024-11-21T23:08:37.668350Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T23:08:37.668401Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:37.668771Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T23:08:37.668818Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:37.668868Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TPersQueueTest::FetchRequest [GOOD] >> TPersQueueTest::EventBatching ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::WrongTableProfile [GOOD] Test command err: 2024-11-21T23:07:36.155560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873159275296323:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:36.156175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dcb/r3tmp/tmpUWm9CU/pdisk_1.dat 2024-11-21T23:07:36.648553Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:36.659177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:36.659270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:36.665869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25795, node 1 2024-11-21T23:07:37.101960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:37.101977Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:37.101987Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:37.102072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18330 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:37.511837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:37.517863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:37.517933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:37.531194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:37.531388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:37.531408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:07:37.540112Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:37.540146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:37.547373Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:37.550630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:37.567675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230457608, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:37.567745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:37.568002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:37.569602Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:37.569773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:37.569820Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:37.569929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:37.569974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:37.570011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:37.573577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:37.573630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:37.573646Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:37.573713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:18330 2024-11-21T23:07:38.025817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.026227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:38.026251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:07:38.035398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T23:07:38.035565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:38.035778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.035849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateSubDomain, at tablet72057594046644480 2024-11-21T23:07:38.035862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:38.038639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:38.038699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:38.038724Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:38.040059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:38.040103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:38.040133Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T23:07:38.042380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:38.042419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 3 -> 128 2024-11-21T23:07:38.045301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.045568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:38.064590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230458091, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:38.064633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet 72057594046644480 2024-11-21T23:07:38.064892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:07:38.072862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:38.073102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.073195Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:07:38.073264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:07:38.073314Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:07:38.073356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T23:07:38.074160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:38.074203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:38.074224Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:07:38.074418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:38.074477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:38.074491Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:07:38.074543Z node 1 :FLAT_TX_S ... LocalPathId: 2], version: 3 2024-11-21T23:08:30.617729Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715658, subscribers: 1 2024-11-21T23:08:30.628235Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:08:30.629196Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:30.629239Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:08:30.641037Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant 2024-11-21T23:08:30.641362Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:08:30.644014Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715659, at schemeshard: 72057594046644480 2024-11-21T23:08:30.659454Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:08:30.660004Z node 13 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715659:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:08:30.660073Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 2 -> 3 2024-11-21T23:08:30.664051Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:31.152751Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439873395436990043:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:31.152840Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:08:31.165738Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:31.165869Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:31.169341Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2024-11-21T23:08:31.225889Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:31.524493Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 HandleReply TEvConfigureStatus operationId:281474976715659:0 at schemeshard:72057594046644480 2024-11-21T23:08:31.549947Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715659:0 HandleReply TEvConfigureStatus operationId:281474976715659:0 at schemeshard:72057594046644480 2024-11-21T23:08:31.550006Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 3 -> 128 2024-11-21T23:08:31.553616Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:08:31.565949Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230511606, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:31.566013Z node 13 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715659:0, at tablet 72057594046644480 2024-11-21T23:08:31.566439Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715659:0 128 -> 240 2024-11-21T23:08:31.569836Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:31.570326Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046644480 2024-11-21T23:08:31.570444Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715659:0 ProgressState 2024-11-21T23:08:31.570672Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715659:0 progress is 1/1 2024-11-21T23:08:31.570745Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2024-11-21T23:08:31.571024Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715659, publications: 1, subscribers: 1 2024-11-21T23:08:31.573927Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715659 2024-11-21T23:08:31.574023Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715659 2024-11-21T23:08:31.574055Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715659, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 4 2024-11-21T23:08:31.574158Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715659, subscribers: 1 TClient is connected to server localhost:19409 2024-11-21T23:08:31.803555Z node 13 :FLAT_TX_SCHEMESHARD INFO: Got new config: TableProfilesConfig { TableProfiles { Name: "default" CompactionPolicy: "default" ExecutionPolicy: "default" PartitioningPolicy: "default" StoragePolicy: "default" ReplicationPolicy: "default" CachingPolicy: "default" } TableProfiles { Name: "profile1" CompactionPolicy: "compaction1" ExecutionPolicy: "execution1" PartitioningPolicy: "partitioning1" StoragePolicy: "storage1" ReplicationPolicy: "replication1" CachingPolicy: "caching1" } TableProfiles { Name: "profile2" CompactionPolicy: "compaction2" ExecutionPolicy: "execution2" PartitioningPolicy: "partitioning2" StoragePolicy: "storage2" ReplicationPolicy: "replication2" CachingPolicy: "caching2" } CompactionPolicies { Name: "default" } CompactionPolicies { Name: "compaction1" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } CompactionPolicies { Name: "compaction2" CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } } ExecutionPolicies { Name: "default" } ExecutionPolicies { Name: "execution1" PipelineConfig { NumActiveTx: 1 EnableOutOfOrder: false DisableImmediate: false EnableSoftUpdates: true } ResourceProfile: "profile1" EnableFilterByKey: true ExecutorFastLogPolicy: false TxReadSizeLimit: 10000000 } ExecutionPolicies { Name: "execution2" PipelineConfig { NumActiveTx: 8 EnableOutOfOrder: true DisableImmediate: true EnableSoftUpdates: false } ResourceProfile: "profile2" EnableFilterByKey: false ExecutorFastLogPolicy: true TxReadSizeLimit: 20000000 } PartitioningPolicies { Name: "default" } PartitioningPolicies { Name: "partitioning1" UniformPartitionsCount: 10 AutoSplit: true AutoMerge: false SizeToSplit: 123456 } PartitioningPolicies { Name: "partitioning2" UniformPartitionsCount: 20 AutoSplit: true AutoMerge: true SizeToSplit: 1000000000 } StoragePolicies { Name: "default" } StoragePolicies { Name: "storage1" ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecLZ4 StorageConfig { SysLog { PreferredPoolKind: "hdd" } Log { PreferredPoolKind: "hdd" } Data { PreferredPoolKind: "hdd" } External { PreferredPoolKind: "hdd" } ExternalThreshold: 4294967295 } } } StoragePolicies { Name: "storage2" ColumnFamilies { Id: 0 ColumnCache: ColumnCacheEver StorageConfig { SysLog { PreferredPoolKind: "ssd" } Log { PreferredPoolKind: "ssd" } Data { PreferredPoolKind: "ssd" } External { PreferredPoolKind: "ssd" } DataThreshold: 30000 } } } ReplicationPolicies { Name: "default" } ReplicationPolicies { Name: "replication1" FollowerCount: 1 AllowFollowerPromotion: false CrossDataCenter: true } ReplicationPolicies { Name: "replication2" FollowerCount: 2 AllowFollowerPromotion: true CrossDataCenter: false } CachingPolicies { Name: "default" } CachingPolicies { Name: "caching1" ExecutorCacheSize: 10000000 } CachingPolicies { Name: "caching2" ExecutorCacheSize: 20000000 } } 2024-11-21T23:08:32.035745Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing 
node 15 2024-11-21T23:08:32.036461Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:08:36.153154Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7439873395436990043:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:36.153295Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTTLTests::ConditionalErase >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2024-11-21T23:08:32.135439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:32.135499Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:32.135583Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:32.147781Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:32.148245Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T23:08:32.148530Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:32.158082Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:32.204009Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:32.204544Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:32.206036Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T23:08:32.206111Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T23:08:32.206175Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T23:08:32.206562Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:32.234671Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T23:08:32.234850Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:32.235005Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T23:08:32.235059Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T23:08:32.235096Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T23:08:32.235133Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:32.235461Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.235526Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.235630Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T23:08:32.235703Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 
2024-11-21T23:08:32.235908Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:32.235955Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:32.236004Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T23:08:32.236037Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:32.236069Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:32.236104Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T23:08:32.236141Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:32.292270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:32.292334Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:32.292391Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T23:08:32.295192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T23:08:32.295252Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:32.295336Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T23:08:32.295478Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T23:08:32.295519Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T23:08:32.295577Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T23:08:32.295623Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:32.295659Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T23:08:32.295695Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T23:08:32.295726Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:32.296022Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:32.296080Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T23:08:32.296119Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T23:08:32.296153Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:32.296200Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T23:08:32.296242Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T23:08:32.296276Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 
2024-11-21T23:08:32.296311Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:32.296356Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:32.324156Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T23:08:32.324232Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:32.324295Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:32.324343Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T23:08:32.324433Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:32.325049Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:32.325097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:32.325138Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T23:08:32.325274Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T23:08:32.325307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:32.325441Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:32.325492Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:32.325528Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T23:08:32.325563Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T23:08:32.329281Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T23:08:32.329355Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:32.329570Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.329607Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.329653Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:32.329689Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:32.329721Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:32.329758Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T23:08:32.329816Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T23:08:32.329863Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:32.329904Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:32.329935Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:32.329965Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:32.330123Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T23:08:32.330161Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:32.330186Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:32.330208Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T23:08:32.330272Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T23:08:32.330339Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:32.330366Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T23:08:32.330741Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:32.330792Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:32.330851Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T23:08:32.330898Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T23:08:32.330940Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T23:08:32.330983Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:32.331032Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:32.331074Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
aitInRS 2024-11-21T23:08:39.455616Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:39.455637Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T23:08:39.455656Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2024-11-21T23:08:39.455678Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2024-11-21T23:08:39.456218Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2024-11-21T23:08:39.456293Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T23:08:39.456381Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:39.456420Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2024-11-21T23:08:39.456448Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2024-11-21T23:08:39.456473Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T23:08:39.456750Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2024-11-21T23:08:39.456786Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2024-11-21T23:08:39.456831Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2024-11-21T23:08:39.456869Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2024-11-21T23:08:39.456905Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:39.456966Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2024-11-21T23:08:39.457004Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2024-11-21T23:08:39.457056Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:39.457089Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:39.457137Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:39.457190Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:39.457501Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:335:2308], Recipient [2:335:2308]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:39.457546Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:39.457595Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2024-11-21T23:08:39.457633Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:39.457663Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T23:08:39.457697Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437185 2024-11-21T23:08:39.457726Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2024-11-21T23:08:39.457758Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.457784Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2024-11-21T23:08:39.457808Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2024-11-21T23:08:39.457869Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2024-11-21T23:08:39.458636Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2024-11-21T23:08:39.458678Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.458717Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2024-11-21T23:08:39.458755Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2024-11-21T23:08:39.458787Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2024-11-21T23:08:39.458826Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.458847Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2024-11-21T23:08:39.458869Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:39.458895Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2024-11-21T23:08:39.458946Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2024-11-21T23:08:39.458977Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2024-11-21T23:08:39.459006Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2024-11-21T23:08:39.459046Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.459068Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:39.459098Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2024-11-21T23:08:39.459136Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2024-11-21T23:08:39.459189Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.459212Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2024-11-21T23:08:39.459237Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2024-11-21T23:08:39.459257Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2024-11-21T23:08:39.459283Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.459303Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2024-11-21T23:08:39.459323Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2024-11-21T23:08:39.459344Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2024-11-21T23:08:39.459370Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.459393Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2024-11-21T23:08:39.459417Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2024-11-21T23:08:39.459439Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2024-11-21T23:08:39.459466Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.459506Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T23:08:39.459536Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2024-11-21T23:08:39.459583Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2024-11-21T23:08:39.460006Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2024-11-21T23:08:39.460061Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T23:08:39.460111Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.460137Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2024-11-21T23:08:39.460161Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2024-11-21T23:08:39.460187Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T23:08:39.460344Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2024-11-21T23:08:39.460363Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2024-11-21T23:08:39.460384Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2024-11-21T23:08:39.460404Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2024-11-21T23:08:39.460423Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:39.460438Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2024-11-21T23:08:39.460454Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2024-11-21T23:08:39.460484Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:39.460535Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T23:08:39.460583Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T23:08:39.460611Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T23:08:39.483397Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2024-11-21T23:08:39.483481Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2024-11-21T23:08:39.483551Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:39.483602Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T23:08:39.483671Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:08:39.483729Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:39.484058Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2024-11-21T23:08:39.484106Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2024-11-21T23:08:39.484146Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T23:08:39.484175Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T23:08:39.484213Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:08:39.484259Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:39.846239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:39.846332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:39.846379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:39.847778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:39.847855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:39.847899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:39.847982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:39.848330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:39.932968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:39.933028Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:39.967511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2024-11-21T23:08:39.974926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:39.975166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:39.983797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:39.984599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:39.985292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:39.985666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:39.992863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:39.994312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:39.994381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:39.994701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:39.994750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:39.994792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:39.994909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.007402Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:40.135481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:40.135726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.135946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:40.136174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:40.136233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.138807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:40.138953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:40.139213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.139286Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:40.139326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:40.139373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:40.142061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.142147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:40.142187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:40.145459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.145520Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.145565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:40.145626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:40.158670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:40.163542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:40.163814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:40.164895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:40.165043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:40.165092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:40.165433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:40.165479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:40.165663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:40.165740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:40.168111Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:40.168168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:40.168371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:40.168409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:40.168813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.168864Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:40.168956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:40.168995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:40.169038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:40.169076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:40.169106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:40.169148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:40.169224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:40.169259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:40.169312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:40.171123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:40.171220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:40.171256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:40.171311Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:40.171354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:40.171456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
: 1], version: 4 2024-11-21T23:08:40.209892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:40.214787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:08:40.214909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:08:40.214937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2024-11-21T23:08:40.214967Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2024-11-21T23:08:40.214997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:40.215080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2024-11-21T23:08:40.223560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.223636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId#100:0 at tablet72057594046678944 2024-11-21T23:08:40.223677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2024-11-21T23:08:40.226615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T23:08:40.226795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T23:08:40.228559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.228639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:40.228695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T23:08:40.228863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:40.232494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T23:08:40.232666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T23:08:40.232959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:40.233049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:40.233083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2024-11-21T23:08:40.233198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T23:08:40.233342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:40.233384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T23:08:40.236839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:40.236930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:40.237096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:08:40.237195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:40.237227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T23:08:40.237257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T23:08:40.237539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.237579Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T23:08:40.237674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T23:08:40.237707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T23:08:40.237750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T23:08:40.237789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T23:08:40.237823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T23:08:40.237876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T23:08:40.237954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:40.238007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T23:08:40.238037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T23:08:40.238161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T23:08:40.238901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:08:40.238980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:08:40.239009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T23:08:40.239063Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:08:40.239107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:40.239974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:08:40.240039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:08:40.240067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T23:08:40.240092Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T23:08:40.240131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:08:40.240187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T23:08:40.247830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T23:08:40.248135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2024-11-21T23:08:40.248338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T23:08:40.248380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2024-11-21T23:08:40.248776Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T23:08:40.248892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T23:08:40.248924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:308:2300] TestWaitNotification: OK eventTxId 100 2024-11-21T23:08:40.249382Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:40.249583Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/rtmr1" took 203us result status StatusSuccess 2024-11-21T23:08:40.249879Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2024-11-21T23:05:06.342866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:05:06.343224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:05:06.343297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0025bb/r3tmp/tmpjtn3Xn/pdisk_1.dat 2024-11-21T23:05:09.571489Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18946, node 1 2024-11-21T23:05:11.330865Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:05:11.330929Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:05:11.330967Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:05:11.331438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:05:11.360706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:05:11.537701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:11.538250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:11.566245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29951 2024-11-21T23:05:13.852447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:05:30.662863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:30.663538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:30.778096Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:05:30.818706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:32.086215Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:32.181961Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:05:32.182051Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:05:32.284331Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:05:32.285471Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:05:32.285708Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:05:32.285772Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:05:32.285829Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:05:32.285898Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:05:32.285966Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:05:32.286019Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:05:32.290532Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:05:32.570264Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:05:32.570385Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:05:32.582328Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T23:05:32.680310Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T23:05:32.680703Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T23:05:32.696646Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T23:05:32.825868Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:05:32.825958Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:05:32.826049Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T23:05:32.869000Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:05:32.869092Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:05:32.873638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:05:32.913767Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:05:32.913937Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:05:33.022210Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:05:33.079032Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:05:33.131604Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:05:34.193350Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:05:34.408494Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:05:36.332702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:36.333181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:36.408886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T23:05:37.509626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:05:37.509868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:37.510157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:37.510293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:37.510475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:37.510607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:05:37.510715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:05:37.510833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:05:37.510947Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:05:37.511054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:05:37.511185Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:05:37.511312Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2292:2842];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:05:37.666375Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:05:37.674813Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:05:37.675111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:05:37.675263Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:05:37.675389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:05:37.675503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2293:2843];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Cl ... ode 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T23:08:31.760741Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8291:6250], schemeshard count = 1 2024-11-21T23:08:34.246910Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:08:34.246973Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:08:34.247016Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 4] is column table. 2024-11-21T23:08:34.247069Z node 2 :STATISTICS DEBUG: [72075186224037897] Start schedule traversal navigate for path [OwnerId: 72075186224037889, LocalPathId: 4] 2024-11-21T23:08:34.252805Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Execute 2024-11-21T23:08:34.271667Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxNavigate::Complete 2024-11-21T23:08:34.272291Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Execute 2024-11-21T23:08:34.272382Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResolve::Complete 2024-11-21T23:08:34.273317Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. Node count = 2 2024-11-21T23:08:34.273378Z node 2 :STATISTICS WARN: [72075186224037897] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2024-11-21T23:08:34.273431Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T23:08:35.583794Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Execute. 
Node count = 1 2024-11-21T23:08:35.616386Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxResponseTabletDistribution::Complete 2024-11-21T23:08:35.622525Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2024-11-21T23:08:35.623783Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8461:6344], server id = [2:8466:6349], tablet id = 72075186224037899, status = OK 2024-11-21T23:08:35.635211Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8461:6344], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.644333Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8462:6345], server id = [2:8467:6350], tablet id = 72075186224037900, status = OK 2024-11-21T23:08:35.644468Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8462:6345], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.644883Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2024-11-21T23:08:35.645738Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8463:6346], server id = [2:8468:6351], tablet id = 72075186224037901, status = OK 2024-11-21T23:08:35.645824Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8463:6346], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.646026Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2024-11-21T23:08:35.646222Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8461:6344], server id = [2:8466:6349], tablet id = 72075186224037899 2024-11-21T23:08:35.646273Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.648130Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8464:6347], server id = [2:8469:6352], tablet id = 72075186224037902, status = OK 2024-11-21T23:08:35.648221Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8464:6347], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.648299Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8465:6348], server id = [2:8470:6353], tablet id = 72075186224037903, status = OK 2024-11-21T23:08:35.648347Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8465:6348], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.648510Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2024-11-21T23:08:35.649474Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8462:6345], server id = [2:8467:6350], tablet id = 72075186224037900 2024-11-21T23:08:35.649506Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.649696Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8471:6354], server id = [2:8473:6356], tablet id = 72075186224037904, status = OK 2024-11-21T23:08:35.649766Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8471:6354], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.649901Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2024-11-21T23:08:35.661330Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2024-11-21T23:08:35.661805Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8463:6346], server id = [2:8468:6351], tablet id = 72075186224037901 2024-11-21T23:08:35.661842Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.661977Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8472:6355], server id = [2:8475:6358], tablet id = 72075186224037905, status = OK 2024-11-21T23:08:35.662063Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8472:6355], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.662317Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2024-11-21T23:08:35.662495Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8464:6347], server id = [2:8469:6352], tablet id = 72075186224037902 2024-11-21T23:08:35.662523Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.663226Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8474:6357], server id = [2:8476:6359], tablet id = 72075186224037906, status = OK 2024-11-21T23:08:35.663320Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8474:6357], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.663377Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8465:6348], server id = [2:8470:6353], tablet id = 72075186224037903 2024-11-21T23:08:35.663401Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.663486Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8471:6354], server id = [2:8473:6356], tablet id = 72075186224037904 2024-11-21T23:08:35.663507Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.664422Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8477:6360], server id = [2:8479:6362], tablet id = 72075186224037907, status = OK 2024-11-21T23:08:35.664512Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8477:6360], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.664612Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8478:6361], server id = [2:8480:6363], tablet id = 72075186224037908, status = OK 2024-11-21T23:08:35.664667Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8478:6361], path = { OwnerId: 72075186224037889 LocalId: 4 } 2024-11-21T23:08:35.665191Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2024-11-21T23:08:35.665634Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2024-11-21T23:08:35.665819Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2024-11-21T23:08:35.665997Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2024-11-21T23:08:35.666039Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T23:08:35.666228Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Execute 2024-11-21T23:08:35.666454Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxAggregateStatisticsResponse::Complete 2024-11-21T23:08:35.666887Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T23:08:35.667026Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8472:6355], server id = [2:8475:6358], tablet id = 72075186224037905 2024-11-21T23:08:35.667058Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.669064Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8474:6357], server id = [2:8476:6359], tablet id = 72075186224037906 2024-11-21T23:08:35.669093Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.669282Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8477:6360], server id = [2:8479:6362], tablet id = 72075186224037907 2024-11-21T23:08:35.669305Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.669568Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T23:08:35.669934Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8478:6361], server id = [2:8480:6363], tablet id = 72075186224037908 2024-11-21T23:08:35.669962Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2024-11-21T23:08:35.766731Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8497:6380]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:08:35.766986Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:08:35.767033Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8497:6380], StatRequests.size() = 1 2024-11-21T23:08:36.002051Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmNiODQ1ZjUtODQ4NWM2NTItNWU1NGZmNTctOTNmMDJkNmI=, TxId: 2024-11-21T23:08:36.002108Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmNiODQ1ZjUtODQ4NWM2NTItNWU1NGZmNTctOTNmMDJkNmI=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2024-11-21T23:08:36.002576Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8505:6386]], StatType[ 2 ], StatRequestsCount[ 1 ] 2024-11-21T23:08:36.002779Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T23:08:36.003218Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T23:08:36.003258Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2024-11-21T23:08:36.006147Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T23:08:36.006229Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2024-11-21T23:08:36.006287Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037889, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2024-11-21T23:08:36.112687Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2024-11-21T23:07:36.992061Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873160591886682:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:36.992093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd0/r3tmp/tmpibQeKz/pdisk_1.dat 2024-11-21T23:07:37.695762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:37.695862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:37.705253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:37.733335Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1073, node 1 2024-11-21T23:07:37.921647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:07:37.942686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:37.943151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:07:38.064706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.071456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:38.071487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.071540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, 
LocalPathId: 1] 2024-11-21T23:07:38.071595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.122898Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:38.132570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:38.165621Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:38.165643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:38.165652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:38.165747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:38.858767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.870734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:38.870806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.873791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:38.873979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:38.873994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:38.876052Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:38.876076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:38.877652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:07:38.881542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230458924, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:38.881589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:38.881853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:38.883886Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:38.884415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:38.884583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.884627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:38.887649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:07:38.887734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:38.887781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:38.889511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:38.889550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:38.889563Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:38.889645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:07:41.587046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873182066724192:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:41.587155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:42.019693Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873160591886682:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:42.037543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:42.041621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Test, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:42.042071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:42.042603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:42.042634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:42.055758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Test 2024-11-21T23:07:42.056118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:42.056399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:42.056498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:07:42.058824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.058876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.058895Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:07:42.059194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:07:42.059213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:07:42.059225Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:07:42.064787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:07:42.071321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:07:42.071429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:07:42.083551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 
2024-11-21T23:07:42.186717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TE ... ublication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:11.277369Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 5 2024-11-21T23:08:11.277492Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:11.277508Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:11.277518Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 5 2024-11-21T23:08:11.277636Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:08:11.277659Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:08:11.277671Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 2024-11-21T23:08:11.277710Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:08:11.284938Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439873311527194766:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:08:11.355768Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710659:0, path# /Root/.metadata/workload_manager/pools/default 2024-11-21T23:08:11.355973Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:08:11.358920Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: metadata@system, status: StatusAlreadyExists, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: default, set owner:metadata@system, add access: +(SR|DS):all-users@well-known, add access: +(SR|DS):root@builtin Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2024-11-21T23:08:17.275273Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439873337229736179:2250];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:17.275657Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dd0/r3tmp/tmpIRFU5R/pdisk_1.dat 2024-11-21T23:08:17.813593Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:17.901167Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:17.907538Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:17.920011Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5516, node 10 2024-11-21T23:08:18.207289Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T23:08:18.207312Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:08:18.207323Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:08:18.207471Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:08:18.749815Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:18.750241Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:08:18.750268Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:18.759217Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:08:18.759453Z node 10 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:08:18.759469Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T23:08:18.770212Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:08:18.770246Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 waiting... 
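[Editor's note] The retry traces interleaved above and below ("Previous query attempt was finished with unsuccessful status OVERLOADED / CLIENT_RESOURCE_EXHAUSTED / UNAVAILABLE / ...: Sending retry attempt N of M") show the test harness re-running a query on transient statuses with a bounded attempt budget. The following is only a minimal, self-contained sketch of that pattern: the status enum, the message format, and the fake query callback are illustrative stand-ins, not the actual test helper or the YDB SDK API.

// Hypothetical illustration of the bounded-retry pattern suggested by the
// "Sending retry attempt N of M" messages in this log. Names are stand-ins.
#include <chrono>
#include <functional>
#include <iostream>
#include <string>
#include <thread>

enum class EStatus {
    SUCCESS,
    OVERLOADED,
    CLIENT_RESOURCE_EXHAUSTED,
    UNAVAILABLE,
    BAD_SESSION,
    SESSION_BUSY,
};

// In this sketch every non-success status is treated as transient/retryable.
static bool IsRetryable(EStatus status) {
    return status != EStatus::SUCCESS;
}

static std::string ToString(EStatus status) {
    switch (status) {
        case EStatus::SUCCESS: return "SUCCESS";
        case EStatus::OVERLOADED: return "OVERLOADED";
        case EStatus::CLIENT_RESOURCE_EXHAUSTED: return "CLIENT_RESOURCE_EXHAUSTED";
        case EStatus::UNAVAILABLE: return "UNAVAILABLE";
        case EStatus::BAD_SESSION: return "BAD_SESSION";
        case EStatus::SESSION_BUSY: return "SESSION_BUSY";
    }
    return "UNKNOWN";
}

// Runs `attempt` up to maxAttempts times in total, backing off between tries
// and logging in the same spirit as the messages captured above.
EStatus RunWithRetries(const std::function<EStatus()>& attempt, int maxAttempts) {
    EStatus status = attempt();
    for (int retry = 1; retry < maxAttempts && IsRetryable(status); ++retry) {
        std::cout << "Previous query attempt was finished with unsuccessful status "
                  << ToString(status) << ": Sending retry attempt "
                  << retry << " of " << (maxAttempts - 1) << std::endl;
        // Linear backoff; a real client would typically use exponential backoff with jitter.
        std::this_thread::sleep_for(std::chrono::milliseconds(50 * retry));
        status = attempt();
    }
    return status;
}

int main() {
    int calls = 0;
    // Toy query that fails twice with OVERLOADED and then succeeds.
    auto fakeQuery = [&calls]() {
        return ++calls < 3 ? EStatus::OVERLOADED : EStatus::SUCCESS;
    };
    const EStatus finalStatus = RunWithRetries(fakeQuery, 4);
    std::cout << "final status: " << ToString(finalStatus) << std::endl;
    return 0;
}

The key design point visible in the log is that the attempt budget differs per status class (5 attempts for OVERLOADED-style statuses, a single retry for NOT_FOUND, UNDETERMINED, TRANSPORT_UNAVAILABLE); the sketch above uses one uniform budget for brevity.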
2024-11-21T23:08:18.777093Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:18.782587Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:08:18.788061Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230498831, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:08:18.788103Z node 10 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:08:18.788403Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:08:18.790923Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:08:18.791110Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:08:18.791162Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:08:18.791276Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:08:18.791313Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:08:18.791378Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:08:18.793220Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2024-11-21T23:08:18.793262Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2024-11-21T23:08:18.793278Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:08:18.793385Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2024-11-21T23:08:22.257656Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439873337229736179:2250];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:22.257715Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 2024-11-21T23:08:32.809160Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:08:32.809213Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry 
attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2024-11-21T23:08:33.429276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:33.429344Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:33.429470Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:33.443367Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:33.443879Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T23:08:33.444583Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:33.455500Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:33.497646Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:33.498253Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:33.499920Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T23:08:33.500001Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T23:08:33.500111Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T23:08:33.500433Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:33.528758Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T23:08:33.528954Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:33.529121Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T23:08:33.529184Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T23:08:33.529244Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T23:08:33.529287Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:33.529662Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.529726Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.529832Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T23:08:33.529928Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T23:08:33.530164Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:33.530218Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:33.530260Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T23:08:33.530348Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:33.530410Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:33.530445Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T23:08:33.530488Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:33.583524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.583602Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.583662Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T23:08:33.586411Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T23:08:33.586493Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:33.586599Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T23:08:33.586768Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T23:08:33.586814Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T23:08:33.586873Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T23:08:33.586926Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:33.586961Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T23:08:33.587000Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T23:08:33.587065Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:33.587398Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:33.587439Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T23:08:33.587475Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T23:08:33.587533Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:33.587633Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T23:08:33.587661Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T23:08:33.587700Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T23:08:33.587738Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:33.587787Z node 1 
:TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:33.612193Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T23:08:33.612261Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:33.612317Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:33.612364Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T23:08:33.612437Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:33.613044Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.613100Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.613153Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T23:08:33.613320Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T23:08:33.613359Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:33.613486Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:33.613532Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:33.613570Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T23:08:33.613606Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T23:08:33.617298Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T23:08:33.617369Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:33.617569Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.617605Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.617653Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:33.617709Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:33.617751Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:33.617790Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T23:08:33.617826Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T23:08:33.617871Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:33.617909Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:33.617939Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 
9437184 to execution unit LoadTxDetails 2024-11-21T23:08:33.617975Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:33.618160Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T23:08:33.618211Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:33.618239Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:33.618263Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T23:08:33.618286Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T23:08:33.618341Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:33.618370Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T23:08:33.618436Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:33.618498Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:33.618545Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T23:08:33.618572Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T23:08:33.618615Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T23:08:33.618647Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:33.618672Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:33.618699Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
9437184 2024-11-21T23:08:41.139260Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2024-11-21T23:08:41.139304Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:41.139343Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T23:08:41.139377Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:41.139574Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2304]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2024-11-21T23:08:41.139637Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.139677Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2024-11-21T23:08:41.139820Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2304]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2024-11-21T23:08:41.139853Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.139880Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2024-11-21T23:08:41.139963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:41.139992Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2024-11-21T23:08:41.140049Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:41.140096Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T23:08:41.140129Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:41.140265Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:41.140292Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2024-11-21T23:08:41.140327Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:41.140363Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T23:08:41.140386Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:41.140412Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T23:08:41.140443Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T23:08:41.140568Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:41.140596Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 
9437186 on unit CompleteOperation 2024-11-21T23:08:41.140641Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:41.140687Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T23:08:41.140712Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:41.140744Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T23:08:41.140773Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T23:08:41.140793Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T23:08:41.140878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:41.140906Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2024-11-21T23:08:41.140943Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:41.140990Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T23:08:41.141016Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:41.141093Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:41.141113Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2024-11-21T23:08:41.141142Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:41.141200Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T23:08:41.141242Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:41.141348Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:41.141383Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2024-11-21T23:08:41.141417Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:41.141471Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T23:08:41.141497Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:41.141608Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:08:41.141633Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2024-11-21T23:08:41.141663Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:97:2132], exec latency: 2 ms, propose latency: 3 ms 2024-11-21T23:08:41.141699Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS 
Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T23:08:41.141723Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:08:41.141946Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2304]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2024-11-21T23:08:41.141988Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.142024Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2024-11-21T23:08:41.142150Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2024-11-21T23:08:41.142180Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.142205Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2024-11-21T23:08:41.142262Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2024-11-21T23:08:41.142278Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.142306Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2024-11-21T23:08:41.142344Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:227:2222], Recipient [1:331:2304]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2024-11-21T23:08:41.142371Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.142485Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2024-11-21T23:08:41.142590Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2024-11-21T23:08:41.142622Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.142648Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2024-11-21T23:08:41.142711Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2024-11-21T23:08:41.142729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.142747Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2024-11-21T23:08:41.142791Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2024-11-21T23:08:41.142810Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.142824Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2024-11-21T23:08:41.142875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2024-11-21T23:08:41.142898Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.142915Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2024-11-21T23:08:41.142991Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:436:2386], Recipient [1:227:2222]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2024-11-21T23:08:41.143008Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:41.143021Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] >> TSchemeShardColumnTableTTL::AlterColumnTable >> TTxAllocatorClientTest::Boot |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange >> TTxAllocatorClientTest::Boot [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:42.152422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:42.152557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:42.152598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:42.152634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:42.152680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:42.152711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:42.152781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
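[Editor's note] The TTL unit tests listed above (TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL, BackupCopyHasNoTtlSettings, AlterTableShouldSucceedOnIndexedTable) exercise validation of per-column TTL settings in the schemeshard. Below is a minimal sketch of the kind of check the first test name suggests, assuming, purely from the name and not from the schemeshard sources, that a configuration whose computed expiration instant would fall before the Unix epoch must be rejected; the function and parameter names are hypothetical.

// Hypothetical validation sketch; the rule is inferred from the test name
// "CreateTableShouldFailOnBeforeEpochTTL", not taken from the actual code.
#include <cstdint>
#include <iostream>

// columnValueSeconds: the row's timestamp column, as seconds since the epoch.
// expireAfterSeconds: the configured TTL offset added to the column value.
// Returns false when the computed expiration would precede the epoch.
bool ExpirationIsAfterEpoch(int64_t columnValueSeconds, int64_t expireAfterSeconds) {
    const int64_t expirationSeconds = columnValueSeconds + expireAfterSeconds;
    return expirationSeconds >= 0;
}

int main() {
    // A recent timestamp with a one-hour offset is fine; a small timestamp
    // combined with a large negative offset would expire before the epoch.
    std::cout << ExpirationIsAfterEpoch(1732230498, 3600) << "\n";  // 1 (accepted)
    std::cout << ExpirationIsAfterEpoch(3600, -7200) << "\n";       // 0 (rejected)
    return 0;
}

This only illustrates the shape of a create-time sanity check; the real schemeshard validation path and error wording are whatever the unittest asserts.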
2024-11-21T23:08:42.153117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:42.228060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:42.228135Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:42.241529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:42.245500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:42.245694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:42.252032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:42.252730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:42.253359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:42.253638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:42.258028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:42.259292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:42.259355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:42.259569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:42.259647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:42.259692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:42.259798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.266168Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:42.393708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:42.393964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.394238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:42.394531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:42.394605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.400074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2024-11-21T23:08:42.400242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:42.400536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.400606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:42.400648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:42.400682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:42.407774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.407845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:42.407893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:42.410248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.410338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.410440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:42.410497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:42.414999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:42.417021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:42.417291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:42.418601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:42.418746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:42.418796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:42.419106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:42.419175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:42.419358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:42.419436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:42.421943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:42.421985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:42.422194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:42.422236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:42.422632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.422751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:42.422853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:42.422886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:42.422942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:42.423000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:42.423040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:42.423089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:42.423164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:42.423208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:42.423251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:42.425179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:42.425299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:42.425338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:42.425383Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:42.425428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:42.425545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
SHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:42.854921Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:08:42.854957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:08:42.855017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:08:42.863670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:08:42.863839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:42.865946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1416 } } 2024-11-21T23:08:42.865991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:42.866123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1416 } } 2024-11-21T23:08:42.866225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1416 } } 2024-11-21T23:08:42.867035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 401 RawX2: 4294969668 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:42.867084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:42.867205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 401 RawX2: 4294969668 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:42.867265Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:42.867348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 401 RawX2: 4294969668 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:42.867416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 
102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:42.867455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T23:08:42.870456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.870735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.899490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:42.899558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:08:42.899670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:42.899721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:42.899793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:42.899849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:42.899898Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.899938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:42.899984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:42.900008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:42.902142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.902591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.902661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2024-11-21T23:08:42.902712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T23:08:42.902754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2024-11-21T23:08:42.902836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 
HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T23:08:42.902884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2024-11-21T23:08:42.906578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:42.906653Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:42.906768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:08:42.906804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:42.906847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:42.906942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:338:2313] message: TxId: 102 2024-11-21T23:08:42.907013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:42.907050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:42.907081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:42.907223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:08:42.907257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:08:42.909226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:42.909274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:429:2393] TestWaitNotification: OK eventTxId 102 2024-11-21T23:08:42.909816Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:42.910058Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 271us result status StatusSuccess 2024-11-21T23:08:42.910508Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> YdbOlapStore::ManyTables [GOOD] >> YdbOlapStore::LogPagingBetween >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2024-11-21T23:08:43.429805Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:08:43.430222Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:08:43.430987Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:08:43.434318Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.434817Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:08:43.445170Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.445287Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.445339Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.445473Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:08:43.445650Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.445749Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:08:43.445880Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:43.118322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:43.118471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:43.118523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:43.118558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:43.118605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:43.118632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:43.118711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:43.119053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:43.207318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:43.207392Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:43.224759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:43.229240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:43.229505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:43.238048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:43.238726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:43.239405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.239731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:43.244394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.245788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:43.245852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.246071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:43.246149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 
1] 2024-11-21T23:08:43.246203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:43.246295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.253180Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:43.412084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:43.412363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.412639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:43.412862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:43.412939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.415500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.415641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:43.415887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.415948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:43.415995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:43.416035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:43.418654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.418741Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:43.418784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:43.420780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.420839Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.420905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.420961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.424766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:43.426796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:43.427016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:43.428035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.428186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:43.428238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.428529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:43.428585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.428777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:43.428864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:43.431331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:43.431380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:43.431569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.431614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:43.432061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.432171Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:43.432286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:43.432326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.432387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:43.432430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.432468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T23:08:43.432498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:43.432582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:43.432624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:43.432659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:43.434731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:43.434878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:43.434941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:43.434978Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:43.435017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:43.435148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:08:43.445892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:08:43.446571Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:08:43.448078Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:08:43.473568Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:08:43.476170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:43.476617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.476765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 } }, at schemeshard: 72057594046678944 2024-11-21T23:08:43.477259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1732230523 seconds (20048 days, 54 years). 
The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2024-11-21T23:08:43.478474Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:43.488641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1732230523 seconds (20048 days, 54 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:43.488864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1732230523 seconds (20048 days, 54 years). The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T23:08:43.489664Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2024-11-21T23:08:32.619910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:32.623540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:32.623711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029af/r3tmp/tmpU0YAsw/pdisk_1.dat 2024-11-21T23:08:33.108963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:33.167631Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:33.221113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:33.221276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:33.235695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:33.382090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:33.854287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:34.205828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:884:2699], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:34.206051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2704], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:34.206119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:34.217246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:08:34.451770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:08:34.848353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fr1gv82r8cbn7892vz86m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U1ODc1MGYtMzRmNjFhZTEtYWRkZGE0N2EtYWIxZmNlZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:34.996228Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fr26g9rgb1yh9nrpkn8z7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWFjOTBhOTktMmY4MTcyZDQtODhhNzcyOTUtNTkwODU1MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:35.480327Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fr2bme6sc09sce5cm5dyq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY0MjYxODQtMTY4MzI4ZmUtZTA3MDVmNTYtYjQ2YTdiODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2024-11-21T23:08:35.643331Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fr2s818dcwp2fr615ep6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY0MjYxODQtMTY4MzI4ZmUtZTA3MDVmNTYtYjQ2YTdiODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2024-11-21T23:08:35.784475Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8fr2yj12aq91pwrsfdtzvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM1ODkwMTctODZhNmZhMjMtZjc4NjQxNTktNzQ2YjFhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2024-11-21T23:08:35.787679Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1128:2804] TxId: 281474976715665. Ctx: { TraceId: 01jd8fr2yj12aq91pwrsfdtzvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM1ODkwMTctODZhNmZhMjMtZjc4NjQxNTktNzQ2YjFhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2024-11-21T23:08:35.818040Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmM1ODkwMTctODZhNmZhMjMtZjc4NjQxNTktNzQ2YjFhMA==, ActorId: [1:1028:2804], ActorState: ExecuteState, TraceId: 01jd8fr2yj12aq91pwrsfdtzvq, Create QueryResponse for error on request, msg: 2024-11-21T23:08:35.822569Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjY0MjYxODQtMTY4MzI4ZmUtZTA3MDVmNTYtYjQ2YTdiODk=, ActorId: [1:1030:2806], ActorState: ExecuteState, TraceId: 01jd8fr2s818dcwp2fr615ep6e, Create QueryResponse for error on request, msg: 2024-11-21T23:08:35.823728Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd8fr2yj12aq91pwrsfdtzvq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM1ODkwMTctODZhNmZhMjMtZjc4NjQxNTktNzQ2YjFhMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:35.836523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jd8fr2s818dcwp2fr615ep6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY0MjYxODQtMTY4MzI4ZmUtZTA3MDVmNTYtYjQ2YTdiODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:36.271088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd8fr3amakve0aq0by3h3q8v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmEyMGVmZTQtY2IyZjYyZjMtMWFlYWMzMzQtNTQzNmRkOGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2024-11-21T23:08:39.875693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:39.875872Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:08:39.876039Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029af/r3tmp/tmpQbJCGE/pdisk_1.dat 2024-11-21T23:08:40.180961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:40.210014Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:40.264090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:40.264246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:40.283667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:40.413781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:40.760680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:41.114449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:792:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:41.114568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:803:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:41.114650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:41.124246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:08:41.349472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2655], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:08:41.497057Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fr88rb1hpt2g7kqftbgae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzRmOTJjYjEtNzk0MzNiMDMtZjhlNjk4OTQtYzY1ZTcwYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:41.608530Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fr8ncecexbcf4vdby772v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWNjZDA3NzEtOGI1ZTAzYzgtZDExNWM5OWEtZDU2MDlhNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2024-11-21T23:08:42.398558Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:964:2776]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2024-11-21T23:08:42.399004Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzFmYmRmOTUtM2VlNWExZDItNDcyZWU0YjItZmNhYWFkYWY=, ActorId: [2:921:2738], ActorState: ExecuteState, TraceId: 01jd8fr8rvd8zxcsqyx4cp56tn, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:42.968953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:42.969081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:42.969149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:42.969211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:42.969255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:42.969286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:42.969363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:42.969746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:43.041530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:43.041606Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:43.071510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:43.075637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T23:08:43.075855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:43.084051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:43.084748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:43.085345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.085641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:43.092571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.093892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:43.093953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.094147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:43.094218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:43.094265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:43.094355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.111130Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:43.283106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:43.283362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.283584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:43.283820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:43.283896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.287223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.287366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:43.287608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.287665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:43.287711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:43.287746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:43.295357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.295437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:43.295483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:43.302961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.303040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.303108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.303174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.311961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:43.314134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:43.314325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:43.315423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.315577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:43.315621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.315859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:43.315902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.316075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:43.316159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:43.318788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2024-11-21T23:08:43.318825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:43.318971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.318998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:43.319393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.319500Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:43.319610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:43.319640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.319699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:43.319742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.319800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:43.319828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:43.319905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:43.319946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:43.319972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:43.321801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:43.321949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:43.321988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:43.322024Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:43.322061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:43.322174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:08:43.661369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2024-11-21T23:08:43.661886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.661989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:43.662040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:08:43.662246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T23:08:43.662353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:08:43.668068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:43.668121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:08:43.668469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.668503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:08:43.668598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.668644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:08:43.669339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:43.669399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:43.669441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:43.669471Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T23:08:43.669501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:08:43.669553Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T23:08:43.672162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:08:43.685486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1206 } } 2024-11-21T23:08:43.685535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:43.685657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1206 } } 2024-11-21T23:08:43.685743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1206 } } FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:43.686491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:43.686555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:43.686693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:43.686737Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:43.686803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:43.686864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.686910Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.686951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:43.686984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:43.689302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.689614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.689892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.689937Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:43.690031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:08:43.690080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:43.690124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:43.690203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 102 2024-11-21T23:08:43.690246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:43.690282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:43.690312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:43.690458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:43.694224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:43.694275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:451:2415] TestWaitNotification: OK eventTxId 102 2024-11-21T23:08:43.694853Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:43.695177Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 317us result status StatusSuccess 2024-11-21T23:08:43.695681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: 
false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T23:08:32.581866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:32.589689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:32.589891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029bf/r3tmp/tmpaWm5EE/pdisk_1.dat 2024-11-21T23:08:33.007522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:33.063739Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:33.115073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:33.115212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:33.126927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:33.247561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:33.286264Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:33.287293Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:33.287638Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:08:33.287802Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:33.333569Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:33.335391Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:33.335594Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:33.337435Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:08:33.337535Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:08:33.337605Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:08:33.337944Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:33.370152Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:08:33.370412Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:33.370546Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:08:33.370585Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:08:33.370622Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:08:33.370657Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:33.371086Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.371150Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.371642Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:08:33.371743Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:08:33.371879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.371917Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.371958Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:08:33.372015Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:08:33.372051Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:33.372104Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:08:33.372142Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:08:33.372179Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:08:33.372208Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:08:33.372244Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:08:33.372368Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:08:33.372407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:33.372563Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:08:33.372804Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:08:33.372859Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:08:33.372955Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:08:33.373008Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:08:33.373042Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:08:33.373073Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:08:33.373103Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:08:33.373374Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:33.373409Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:08:33.373441Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:08:33.373471Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:08:33.373527Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:08:33.373559Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:08:33.373594Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:08:33.373646Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:08:33.373683Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:33.375098Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:08:33.375140Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:08:33.386306Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:08:33.386373Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:08:33.386424Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:08:33.386515Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:08:33.386584Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:33.575969Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.576022Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.576074Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:08:33.576180Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:08:33.576213Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:33.576333Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:08:33.576382Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:08:33.576432Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:08:33.576479Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:08:33.593572Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:08:33.593653Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:33.594163Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.594202Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.594246Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:08:33.594302Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:33.594344Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:08:33.594387Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1046:2841], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 457 DurationUs: 7000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 105 FinishTimeMs: 1732230523100 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 60 BuildCpuTimeUs: 45 WaitInputTimeUs: 4739 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230523093 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:43.102807Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1046:2841] 2024-11-21T23:08:43.102875Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T23:08:43.102922Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1047:2842], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T23:08:43.103308Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1047:2842], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 540 DurationUs: 8000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 268 FinishTimeMs: 1732230523101 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 208 BuildCpuTimeUs: 60 WaitInputTimeUs: 4849 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230523093 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:43.103355Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1047:2842] 2024-11-21T23:08:43.103399Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T23:08:43.103440Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1048:2843], CA [2:1049:2844], 2024-11-21T23:08:43.103766Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1048:2843], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 368 DurationUs: 9000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 165 FinishTimeMs: 1732230523102 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 133 BuildCpuTimeUs: 32 WaitInputTimeUs: 4924 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230523093 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:43.103810Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1048:2843] 2024-11-21T23:08:43.103853Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T23:08:43.103910Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], CA [2:1049:2844], 2024-11-21T23:08:43.104269Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1049:2844], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 622 DurationUs: 10000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 349 FinishTimeMs: 1732230523103 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 297 BuildCpuTimeUs: 52 WaitInputTimeUs: 6081 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230523093 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:43.104314Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1049:2844] 2024-11-21T23:08:43.104354Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T23:08:43.104393Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1050:2845], CA [2:1051:2846], 2024-11-21T23:08:43.104650Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1050:2845], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 433 DurationUs: 10000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 189 FinishTimeMs: 1732230523103 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 136 BuildCpuTimeUs: 53 WaitInputTimeUs: 7418 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230523093 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:43.104688Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1050:2845] 2024-11-21T23:08:43.104724Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [2:1051:2846], 2024-11-21T23:08:43.104757Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1051:2846], 2024-11-21T23:08:43.104903Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2846], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 387 DurationUs: 10000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 161 FinishTimeMs: 1732230523104 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 127 BuildCpuTimeUs: 34 WaitInputTimeUs: 8041 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230523094 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:43.104938Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2846] 2024-11-21T23:08:43.105106Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:08:43.105184Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1039:2823] TxId: 281474976715667. Ctx: { TraceId: 01jd8fr9t49nkewddfarm499pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWVhMTg0NDQtZDUwZWI1NGYtZDdmYWZhZTEtYTBkOWM0NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.003808s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> TSchemeShardTTLTests::ConditionalErase [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite [GOOD] >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2024-11-21T23:08:45.103638Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:08:45.104102Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:08:45.104861Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:08:45.106668Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.107149Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:08:45.116979Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.117088Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.117140Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.117221Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:08:45.117399Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.117497Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:08:45.117627Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:08:45.118279Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T23:08:45.118802Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.118859Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.118951Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2024-11-21T23:08:45.118992Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 2024-11-21T23:08:45.119167Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T23:08:45.119421Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T23:08:45.119581Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T23:08:45.119751Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T23:08:45.119882Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T23:08:45.120327Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.120398Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.120538Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2024-11-21T23:08:45.120587Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 5000 to# 10000 2024-11-21T23:08:45.120778Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T23:08:45.120968Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T23:08:45.121157Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T23:08:45.121428Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2024-11-21T23:08:45.121543Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T23:08:45.121884Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.121943Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:45.122057Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2024-11-21T23:08:45.122094Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 10000 to# 15000 2024-11-21T23:08:45.122276Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:40.893101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:40.893205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:40.893258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:40.893292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:40.893332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:40.893363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:40.893437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:40.893742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:40.968230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:40.968285Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:40.979879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:40.984004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:40.984200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T23:08:40.991358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:40.992064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:40.992703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:40.993020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:40.997549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:40.998805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:40.998865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:40.999084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:40.999151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:40.999197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:40.999304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:41.005792Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:41.163018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:41.163260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:41.163495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:41.163695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:41.163753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:41.165863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:41.165994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:41.166171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:41.166220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:41.166256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T23:08:41.166287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:41.168144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:41.168189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:41.168223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:41.169843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:41.169883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:41.169944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:41.170004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:41.173901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:41.175613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:41.175780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:41.176740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:41.176865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:41.176909Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:41.177171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:41.177230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:41.177398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:41.177485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:41.179388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:41.179423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:41.179566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:41.179603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:41.179892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:41.180254Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:41.180377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:41.180407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:41.180461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:41.180500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:41.180549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:41.180577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:41.180647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:41.180695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:41.180723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:41.182547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:41.182656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:41.182692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:41.182728Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:41.182761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:41.182847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Size 619 rowCount 2 cpuUsage 0 2024-11-21T23:08:44.855152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T23:08:44.855293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.855485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.855668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640232500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:08:44.855776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640232500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:08:44.855873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640232500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:08:44.855943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640232500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:08:44.856012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040232500 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:08:44.856076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640232500 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:08:44.857071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T23:08:44.857830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T23:08:44.858341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:44.858748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:44.858857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T23:08:44.859063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T23:08:44.859192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 
2024-11-21T23:08:44.859248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T23:08:44.861189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.861233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T23:08:44.872897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.872969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2024-11-21T23:08:44.874154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.874204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:44.878721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.879166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.879582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.879662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.232500Z, at schemeshard: 72057594046678944 2024-11-21T23:08:44.879722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.879752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.232500Z, at schemeshard: 72057594046678944 2024-11-21T23:08:44.879875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.879921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:44.882753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.882868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.882941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.234500Z, at schemeshard: 72057594046678944 2024-11-21T23:08:44.883040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.883078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.883121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.883176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.883203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.883250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.883296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 
72057594046678944:1, run at: 2020-09-18T23:04:00.234500Z, at schemeshard: 72057594046678944 2024-11-21T23:08:44.883345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.883393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.883429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.234500Z, at schemeshard: 72057594046678944 2024-11-21T23:08:44.883455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.963410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2024-11-21T23:08:44.963642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T23:08:44.963735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0 2024-11-21T23:08:44.963797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T23:08:44.963918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2 2024-11-21T23:08:44.963958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0 2024-11-21T23:08:44.964002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 2, DataSize 603 2024-11-21T23:08:44.964110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T23:08:44.964148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0 2024-11-21T23:08:44.964178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 0, DataSize 0 2024-11-21T23:08:44.964223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1 2024-11-21T23:08:44.964254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0 2024-11-21T23:08:44.964277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 5: RowCount 1, DataSize 43 2024-11-21T23:08:44.964332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 619 row count 2 2024-11-21T23:08:44.964375Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0 2024-11-21T23:08:44.964410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409550 followerId=0, pathId 6: RowCount 2, DataSize 619, with borrowed parts 2024-11-21T23:08:44.982831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.982906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2024-11-21T23:08:44.989525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:08:44.989730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:08:44.989790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.236500Z, at schemeshard: 72057594046678944 2024-11-21T23:08:44.989859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: 2024-11-21T23:08:34.287214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:34.290617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:34.290763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029a7/r3tmp/tmpwUMlWW/pdisk_1.dat 2024-11-21T23:08:34.759091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:34.801187Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:34.864092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:34.864208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:34.879518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:35.008935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:35.404306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T23:08:35.745001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:35.745147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:35.745232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:35.754474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:08:35.972755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:08:36.304325Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fr30wc8tqncjvasw56jpa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM0OTA0YTgtMTg1MDg5YTAtNzQ2Y2JkOTktZmNmMDYyNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:36.410335Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fr3kb0x35fv94xf6653vq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE4NDIzN2MtMzI1MDE3NTEtZGY1MjVjNGItNGM3ZTA4OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2024-11-21T23:08:36.816740Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fr3q805rn9rrma1s07ajx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjEyODRiN2UtZmJmMTcxMjMtMjZjZTM0NmUtMjAyNGE4N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T23:08:36.945348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fr4325xt6rvmvma0g28xp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjEyODRiN2UtZmJmMTcxMjMtMjZjZTM0NmUtMjAyNGE4N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2024-11-21T23:08:37.456431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8fr4h844mxrh50gm1d2ne8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY2Mzg0ZjAtNDdjZTA1ZGMtN2RkMzc1NjAtNjQyZTQ4NDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2024-11-21T23:08:41.530336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:41.530541Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:08:41.530741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029a7/r3tmp/tmp0mzLR3/pdisk_1.dat 2024-11-21T23:08:41.824050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:41.852316Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:41.905295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:41.905418Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:41.917013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:42.033075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:42.342346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:42.709307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:42.709412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:42.709538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:42.720286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:08:42.955294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:08:43.114800Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fr9tkakj3sr337pg2hd3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTM0YmUyYTctNzdhNTUzOGUtNDI4MDgyMWQtNmI4ZTBjZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:43.245065Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fra808x4bbr62173fg8yn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTI5NzNiNGMtOGFlYTY1NGEtYTI3OTIxOWQtOWY0MmQ4NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2024-11-21T23:08:44.150757Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8frb4xcxf5nrfae0z7qmx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWQ2MzZiYTQtYjAwNTU2MDctYjYxZTZkMDMtMmJkZTI0ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:44.239675Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd8frb7x48sr62yvahw7e5bf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODg1YjQzNC05M2Y0MDQ3OS03OTYzZjkxOS05NWE5MTlhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 >> TTxAllocatorClientTest::InitiatingRequest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> TTxAllocatorClientTest::InitiatingRequest [GOOD] >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> TSchemeShardTTLTests::ShouldSkipDroppedColumn |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T23:08:35.508372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:35.511727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:35.511909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002996/r3tmp/tmp8CcfPQ/pdisk_1.dat 2024-11-21T23:08:36.003651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:36.062253Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:36.112168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:36.115194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:36.131662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:36.258319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:36.644622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T23:08:36.976515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:36.976606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:36.976665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:36.981983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:08:37.201849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:08:37.534868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fr47e1nxhd6482gxhr6fs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2QwNTJkZjktMmM5OTM2N2QtZTY4MzRkOTQtNTRmY2RjYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:37.655565Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fr4t54jgmsrwvbnn3r0ww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZlY2Y0ODgtNjdlZmI2MDgtN2QwYjU5ZjEtNDI3NGJkNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2024-11-21T23:08:38.054003Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fr4xq798w59mgadeedd79, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjczYzM2Mi1iNTBkZjRmLTRkMGJiNTY1LTgyZTRlN2Zk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T23:08:38.188644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fr59t5tw95wp1mr6sd26z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjczYzM2Mi1iNTBkZjRmLTRkMGJiNTY1LTgyZTRlN2Zk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2024-11-21T23:08:38.737235Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8fr5s2a4gqwk2jdpy8trmf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFhNDlmNzEtOGQ0NzVkYTUtNGUzYTg0ZDMtM2E2NGY5MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2024-11-21T23:08:42.670854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:42.671048Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:08:42.671229Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002996/r3tmp/tmp1hfI9H/pdisk_1.dat 2024-11-21T23:08:42.936199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:42.968818Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:43.019033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:43.019176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:43.030832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:43.159639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:43.473345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2024-11-21T23:08:43.800160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:43.800273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:801:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:43.800355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:43.805470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:08:44.002768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:804:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:08:44.135511Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8frawp0t9h58x4g7k6vgh0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjhiNGM1NDUtNWY3MDY1ZGUtNDFjMmIxMjctOGY2MzFmNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Processing EvWrite row 281474976715661 TEvProposeTransaction 281474976715661 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 889 RawX2: 8589937235 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\ty\003\000\000\000\000\000\000\021S\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\002\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\001\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=NjhiNGM1NDUtNWY3MDY1ZGUtNDFjMmIxMjctOGY2MzFmNWI=\222\001%\n\007TraceId\022\03201jd8frawp0t9h58x4g7k6vgh0\222\001\026\n\022CurrentExecutionId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001\021\n\006PoolId\022\007default\230\001\000\"\n\010\256\243\022\020\0020\000@\n" TxId: 281474976715661 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715661 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715661 OrderId: 281474976715661 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 13 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 291 } } 2024-11-21T23:08:44.253430Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8frb7teybm13n5q75arnq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRjY2ZjZTktOGY5ZjUwNzgtZGM5NDU1OWYtZWEwYjIwZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715662 TEvProposeTransaction 281474976715662 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 911 RawX2: 8589937308 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\217\003\000\000\000\000\000\000\021\234\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\004\002\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\002\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\026\n\022CurrentExecutionId\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=ODRjY2ZjZTktOGY5ZjUwNzgtZGM5NDU1OWYtZWEwYjIwZmM=\222\001\026\n\022CustomerSuppliedId\022\000\222\001%\n\007TraceId\022\03201jd8frb7teybm13n5q75arnq7\222\001\014\n\010Database\022\000\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\230\001\000\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715662 ExecLevel: 0 Flags: 8 EvWriteResult 281474976715662 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715662 OrderId: 281474976715662 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 13 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 270 } } ===== Begin SELECT 2024-11-21T23:08:44.652654Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8frbbvcwk582vk46m8k88h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmNhNmM3ODYtYzk5YmJjNDktZTgwNzliNjMtZGUyZDBmYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2024-11-21T23:08:44.780335Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8frbqs3j56ynazfxxsxjhj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmNhNmM3ODYtYzk5YmJjNDktZTgwNzliNjMtZGUyZDBmYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\365\006\010\001\022\223\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_1\022\r\010\240\234\001\022\005\t\000\002\006\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\r/Root/table-1\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\003\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004\200\001\000\222\001\021\n\006PoolId\022\007default\222\001\023\n\nDatabaseId\022\005/Root\222\001\026\n\022CustomerSuppliedId\022\000\222\001%\n\007TraceId\022\03201jd8frbqs3j56ynazfxxsxjhj\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=MmNhNmM3ODYtYzk5YmJjNDktZTgwNzliNjMtZGUyZDBmYzM=\222\001\026\n\022CurrentExecutionId\022\000\222\001\014\n\010Database\022\000\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 Processing EvWrite row 281474976715664 TEvProposeTransaction 281474976715664 is observed and will be replaced with EvWrite: TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937330 } TxBody: " \0008\000`\200\200\200\005j\367\006\010\001\022\225\006\010\002\022\024\n\022\t\321\003\000\000\000\000\000\000\021\262\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\006\003?&\000\003?(\002\017)\211\002?,?\014 
Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\000\000\360?\200\0014\"+\n\032%kqp%tx_result_binding_0_0\022\r\010\240\234\001\022\005\t\000\002\010\004\030\0012\002r\000:\237\001\nDtype.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta\022W\n#\n\014\010\200\202\224\204\200\200\200\200\001\020\003\022\r/Root/table-2\030\001*\0000\001\0320\n\016\022\014\022\n\001\000\004\000\000\000\004\000\000\000\032\r\n\t\010\001\022\003key\030\002\020\004\032\017\n\013\010\002\022\005value\030\002\020\004H\001\200\001\000\222\001\026\n\022CurrentExecutionId\022\000\222\001\023\n\nDatabaseId\022\005/Root\222\001\014\n\010Database\022\000\222\001Z\n\tSessionId\022Mydb://session/3?node_id=2&id=MmNhNmM3ODYtYzk5YmJjNDktZTgwNzliNjMtZGUyZDBmYzM=\222\001%\n\007TraceId\022\03201jd8frbqs3j56ynazfxxsxjhj\222\001\021\n\006PoolId\022\007default\222\001\026\n\022CustomerSuppliedId\022\000\230\001\000\032O\n#\t\217\023\000\000\000\000\001\000\021\001\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0038\000\020\200\200\204\200\200\200\204\200\001\020\201\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001\030\201\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715664 ExecLevel: 0 Flags: 0 EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037888 TxId: 281474976715664 MinStep: 2012 MaxStep: 32012 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037888 Generation: 1 Step: 15 ActorId { RawX1: 630 RawX2: 8589937128 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 260 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_PREPARED Origin: 72075186224037889 TxId: 281474976715664 MinStep: 2012 MaxStep: 32012 DomainCoordinators: 72057594046316545 TabletInfo { TabletId: 72075186224037889 Generation: 1 Step: 14 ActorId { RawX1: 718 RawX2: 8589937190 } IsFollower: false } TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 188 } } ... captured readset ... captured readset ===== restarting tablet EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037889 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 3 Name: "/Root/table-2" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 763 } } EvWriteResult 281474976715664 is observed and will be replaced with EvProposeTransactionResult: Status: STATUS_COMPLETED Origin: 72075186224037888 TxId: 281474976715664 Step: 2500 OrderId: 281474976715664 TxStats { TableAccessStats { TableInfo { SchemeshardId: 72057594046644480 PathId: 2 Name: "/Root/table-1" } UpdateRow { Count: 1 Rows: 1 Bytes: 8 } } PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 789 } } ===== Waiting for commit response ===== Last SELECT 2024-11-21T23:08:45.279956Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd8frc711rzzdny6jrv08k35, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjY4MjU5N2QtYTcyZjcwOWMtMWI2MGJjN2MtZDNkZTczNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-StreamLookup+EvWrite [GOOD] Test command err: 2024-11-21T23:08:35.392163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:35.402013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:35.402203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00299e/r3tmp/tmp3LMkVu/pdisk_1.dat 2024-11-21T23:08:35.818516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:35.922510Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:35.980361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:35.980530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:35.994464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:36.122193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:36.170057Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:36.171696Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:36.172220Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:08:36.172445Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:36.220237Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:36.220982Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:36.221076Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:36.227228Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:08:36.227334Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:08:36.227403Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:08:36.227769Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:36.284793Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:08:36.285009Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:36.285151Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:08:36.285193Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:08:36.285231Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:08:36.285273Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:36.285703Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:36.285779Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:36.286272Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:08:36.286372Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:08:36.286526Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:36.286561Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:36.286608Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:08:36.286660Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:08:36.286698Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:36.286751Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:08:36.286794Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:08:36.286828Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:08:36.286857Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:08:36.286898Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:08:36.287024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:08:36.287064Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:36.287182Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:08:36.287416Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:08:36.287473Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:08:36.287570Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:08:36.287631Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:08:36.287669Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:08:36.287705Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:08:36.287741Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:08:36.288075Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:36.288117Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:08:36.288149Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:08:36.288180Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:08:36.288239Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:08:36.288271Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:08:36.288302Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:08:36.288352Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:08:36.288380Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:36.289758Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:08:36.289810Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:08:36.300537Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:08:36.300623Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:08:36.300669Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:08:36.300722Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:08:36.300796Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:36.495423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:36.495481Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:36.495553Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:08:36.495661Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:08:36.495690Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:36.495816Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:08:36.495873Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:08:36.495916Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:08:36.495974Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:08:36.500317Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:08:36.500404Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:36.500986Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:36.501037Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:36.501084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:08:36.501136Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:36.501183Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:08:36.501228Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1008:2807], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 534 DurationUs: 9000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 118 FinishTimeMs: 1732230525311 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 77 BuildCpuTimeUs: 41 WaitInputTimeUs: 5732 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230525302 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:45.312552Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1008:2807] 2024-11-21T23:08:45.312614Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T23:08:45.312658Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], CA [2:1009:2808], 2024-11-21T23:08:45.313175Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1009:2808], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 643 DurationUs: 9000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 366 FinishTimeMs: 1732230525311 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 319 BuildCpuTimeUs: 47 WaitInputTimeUs: 5612 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230525302 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:45.313231Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1009:2808] 2024-11-21T23:08:45.313288Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T23:08:45.313327Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1010:2809], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T23:08:45.313650Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1010:2809], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 515 DurationUs: 9000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 291 FinishTimeMs: 1732230525312 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 246 BuildCpuTimeUs: 45 WaitInputTimeUs: 5779 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230525303 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:45.313687Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1010:2809] 2024-11-21T23:08:45.313728Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T23:08:45.313793Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1011:2810], CA [2:1012:2811], 2024-11-21T23:08:45.314302Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1011:2810], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 682 DurationUs: 10000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 403 FinishTimeMs: 1732230525313 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 333 BuildCpuTimeUs: 70 WaitInputTimeUs: 7254 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230525303 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:45.314360Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1011:2810] 2024-11-21T23:08:45.314418Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T23:08:45.314485Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1013:2812], CA [2:1012:2811], 2024-11-21T23:08:45.314771Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1012:2811], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 396 DurationUs: 11000 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 167 FinishTimeMs: 1732230525314 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 108 BuildCpuTimeUs: 59 WaitInputTimeUs: 8236 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230525303 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:45.314807Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1012:2811] 2024-11-21T23:08:45.314853Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [2:1013:2812], 2024-11-21T23:08:45.314888Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1013:2812], 2024-11-21T23:08:45.315005Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1013:2812], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 378 DurationUs: 11000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 156 FinishTimeMs: 1732230525314 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 118 BuildCpuTimeUs: 38 WaitInputTimeUs: 8726 HostName: "ghrun-uc25mmfz34" NodeId: 2 StartTimeMs: 1732230525303 } MaxMemoryUsage: 1048576 } 2024-11-21T23:08:45.315052Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1013:2812] 2024-11-21T23:08:45.315238Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:08:45.315306Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1001:2789] TxId: 281474976715665. Ctx: { TraceId: 01jd8frbysamrj1dd6aw7m5cws, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODRiZWM4MTItMTc4N2EzZTUtZDBiMGUxNjEtYmJlMGRlYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.004174s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> DataShardTxOrder::DelayData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2024-11-21T23:08:46.561294Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:08:46.561816Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:08:46.562713Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:08:46.564751Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:46.565369Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:08:46.576815Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:46.576977Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:46.577041Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:46.577129Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:08:46.577348Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:46.577490Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:08:46.577680Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:08:46.578522Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T23:08:46.579180Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:46.579290Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:46.579434Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2024-11-21T23:08:46.579484Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 >> BuildStatsHistogram::Many_Serial [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> BasicUsage::BrokenCredentialsProvider [GOOD] >> TSchemeShardTTLTests::CheckCounters |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2024-11-21T23:08:26.201742Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:26.201903Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:26.229232Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:26.229371Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:26.261824Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:27.050718Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:27.050831Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:27.070512Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:27.070843Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:27.099724Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:27.937060Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:27.937167Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:27.959763Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:27.959892Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:27.999086Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:27.999533Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:128:2154], cookie=16613807838628744852, session=0, seqNo=0) 2024-11-21T23:08:27.999658Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:28.019493Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:128:2154], cookie=16613807838628744852, session=1) 2024-11-21T23:08:28.020083Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:137:2161], cookie=10388097422709084308) 2024-11-21T23:08:28.020170Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:137:2161], cookie=10388097422709084308) 2024-11-21T23:08:28.368251Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:28.387464Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:28.690463Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:28.708719Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:28.982832Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.003565Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.286797Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.300547Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.577109Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.589348Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.863371Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.879620Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.143968Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.161220Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.429501Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.443098Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.716730Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.731128Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.095086Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.111262Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2024-11-21T23:08:31.384229Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.399330Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.687470Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.706057Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.998680Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.015242Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.322773Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.342101Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.676398Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.689256Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.965644Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.987269Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.262807Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.280031Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.558808Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.575489Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.862721Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.879122Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.165168Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.181907Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.493199Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.511512Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.796559Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.814446Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.098699Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.119213Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.399856Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.415959Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.701325Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.717924Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.034727Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.051294Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.321759Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.345081Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.629595Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.643819Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2024-11-21T23:08:36.921665Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.940683Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:37.258725Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:37.274180Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:37.563453Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:37.579420Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:37.850838Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:37.865813Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:38.166453Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:38.186640Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:38.462923Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:38.477331Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:38.750975Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:38.767452Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:39.027862Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:39.043301Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:39.326866Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:39.344762Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:39.639035Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:39.659198Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:39.972419Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:39.997094Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:40.317203Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:40.334346Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:40.602564Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:40.619787Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:40.894078Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:40.907209Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:41.162095Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:41.174542Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:41.469287Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:41.499153Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:41.826766Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:41.843712Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:42.150303Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:42.164354Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2024-11-21T23:08:42.496120Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:42.517100Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:42.797417Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:42.811462Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:43.094740Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:43.114377Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:43.441356Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:43.455632Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:43.721007Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:43.737138Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:44.018900Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:44.043385Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:44.311229Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:44.327164Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:44.610785Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:44.631174Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:44.908904Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:44.928352Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:45.204112Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:45.221415Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:45.488625Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:45.502777Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:45.765796Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:45.783350Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:46.054745Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:46.068429Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:46.416070Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2024-11-21T23:08:46.416171Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T23:08:46.429965Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2024-11-21T23:08:46.441570Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:469:2476], cookie=11386912880716305634) 2024-11-21T23:08:46.441694Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:469:2476], cookie=11386912880716305634) 2024-11-21T23:08:47.159702Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:47.159822Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:47.185562Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:47.185741Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxInit::Execute 2024-11-21T23:08:47.218901Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:47.225250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:130:2156], cookie=4968250257351961666, path="Root", config={ MaxUnitsPerSecond: 100 }) 2024-11-21T23:08:47.225511Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2024-11-21T23:08:47.238260Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:130:2156], cookie=4968250257351961666) 2024-11-21T23:08:47.240069Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:139:2163]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:47.240147Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:139:2163], cookie=0) 2024-11-21T23:08:47.240429Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2024-11-21T23:08:47.240460Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=0) 2024-11-21T23:08:47.289003Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:47.289114Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2024-11-21T23:08:47.289414Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:144:2168]) 2024-11-21T23:08:47.289559Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2024-11-21T23:08:47.339113Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:139:2163]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2024-11-21T23:08:30.315253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:30.315302Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:30.315380Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:30.336940Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:30.337440Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T23:08:30.337773Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:30.348347Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:30.409698Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:30.410263Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:30.411869Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T23:08:30.411952Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T23:08:30.412018Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T23:08:30.412328Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:30.440102Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T23:08:30.440280Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:30.440448Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T23:08:30.440520Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T23:08:30.440548Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T23:08:30.440575Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:30.440898Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:30.440953Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:30.441083Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T23:08:30.441169Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T23:08:30.441456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:30.441498Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:30.441542Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T23:08:30.441574Z node 
1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:30.441605Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:30.441633Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T23:08:30.441666Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:30.493851Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:30.493914Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:30.494175Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T23:08:30.501458Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T23:08:30.501521Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:30.501618Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T23:08:30.501762Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T23:08:30.501822Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T23:08:30.501876Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T23:08:30.501923Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:30.501956Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T23:08:30.501984Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T23:08:30.502045Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:30.502328Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:30.502364Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T23:08:30.502416Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T23:08:30.502457Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:30.502503Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T23:08:30.502545Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T23:08:30.502583Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T23:08:30.502621Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:30.502666Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:30.539088Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 
2024-11-21T23:08:30.539177Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:30.539210Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:30.539249Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T23:08:30.539326Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:30.539844Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:30.539890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:30.539929Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T23:08:30.540096Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T23:08:30.540124Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:30.540274Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:30.540329Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:30.540362Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T23:08:30.540410Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T23:08:30.544460Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T23:08:30.544558Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:30.544809Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:30.544853Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:30.544905Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:30.544945Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:30.544977Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:30.545017Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T23:08:30.545070Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T23:08:30.545117Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:30.545152Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:30.545183Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:30.545230Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:30.545397Z node 1 
:TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T23:08:30.545434Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:30.545458Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:30.545478Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T23:08:30.545512Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T23:08:30.545602Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:30.545626Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T23:08:30.545659Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:30.545687Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:30.545736Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T23:08:30.545789Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T23:08:30.545825Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T23:08:30.545880Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:30.545901Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:30.545933Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... 
e 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T23:08:47.093596Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T23:08:47.093625Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2024-11-21T23:08:47.093649Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T23:08:47.093669Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T23:08:47.093691Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2024-11-21T23:08:47.093709Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2024-11-21T23:08:47.094028Z node 2 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2024-11-21T23:08:47.094120Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T23:08:47.094172Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T23:08:47.094193Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2024-11-21T23:08:47.094213Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2024-11-21T23:08:47.094233Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompleteOperation 2024-11-21T23:08:47.094416Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 2024-11-21T23:08:47.094451Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2024-11-21T23:08:47.094483Z node 2 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2024-11-21T23:08:47.094511Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2024-11-21T23:08:47.094538Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2024-11-21T23:08:47.094556Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2024-11-21T23:08:47.094579Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2024-11-21T23:08:47.094616Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:47.094670Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:47.094712Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2024-11-21T23:08:47.095070Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:231:2226], Recipient [2:231:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:47.095116Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:47.095156Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:47.095205Z node 2 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:47.095234Z node 2 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2024-11-21T23:08:47.095263Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2024-11-21T23:08:47.095287Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.095354Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:47.095387Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:47.095412Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:47.095886Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2024-11-21T23:08:47.095945Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.095969Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:47.095990Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T23:08:47.096011Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T23:08:47.096042Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.096076Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T23:08:47.096097Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:47.096129Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:47.096174Z node 2 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2024-11-21T23:08:47.096197Z node 2 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2024-11-21T23:08:47.096219Z node 2 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2024-11-21T23:08:47.096246Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.096265Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:47.096284Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T23:08:47.096321Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2024-11-21T23:08:47.096359Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.096377Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2024-11-21T23:08:47.096402Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T23:08:47.096430Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2024-11-21T23:08:47.096455Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.096472Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit 
StoreAndSendOutRS 2024-11-21T23:08:47.096501Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T23:08:47.096545Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2024-11-21T23:08:47.096570Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.096588Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T23:08:47.096607Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T23:08:47.096625Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2024-11-21T23:08:47.096683Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.096713Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T23:08:47.096732Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2024-11-21T23:08:47.096750Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2024-11-21T23:08:47.097018Z node 2 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2024-11-21T23:08:47.097060Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T23:08:47.097103Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:47.097129Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2024-11-21T23:08:47.097160Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2024-11-21T23:08:47.097200Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2024-11-21T23:08:47.097352Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2024-11-21T23:08:47.097378Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2024-11-21T23:08:47.097400Z node 2 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2024-11-21T23:08:47.097438Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2024-11-21T23:08:47.097468Z node 2 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2024-11-21T23:08:47.097508Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2024-11-21T23:08:47.097528Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2024-11-21T23:08:47.097551Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:47.097570Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:47.097596Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:47.097629Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has 
no ready operations at 9437184 2024-11-21T23:08:47.114974Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2024-11-21T23:08:47.115075Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2024-11-21T23:08:47.115133Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:47.115176Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2024-11-21T23:08:47.115232Z node 2 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:08:47.115278Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:47.115457Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:47.115480Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2024-11-21T23:08:47.115512Z node 2 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [2:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:08:47.115548Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2024-11-21T23:08:34.837866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:34.856478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:34.856683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029a6/r3tmp/tmpcaDhMo/pdisk_1.dat 2024-11-21T23:08:35.287606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:35.341006Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:35.400230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:35.400372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:35.414862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:35.538535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:35.934632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:08:36.271469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:791:2646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:36.271585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:800:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:36.271658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:36.277208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:08:36.488343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:805:2654], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:08:36.780173Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fr3hd5n6qsc08n4mnjbgb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMyZDRiZWItOGI0NGRmMDgtOTYxNzg1YzAtMTc0MTBkNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:36.881312Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fr42hcrjd0tk1egjm2v8c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU1YjA4YTktYzQyZjc0NTAtMmU3NzFmNGEtOTY2NzY2Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2024-11-21T23:08:37.614408Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fr4et0se1jwna089ac2qm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwNTI1ZTYtYzg4ZGUwYzctY2U5ZjFlYmUtZWZlN2E2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2024-11-21T23:08:38.002957Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fr4we515vpkw90zpr8cd7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkxZDc1NmMtNjdhYTI1NDQtNTU4ZmQzMTYtYWE1NDY3YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2024-11-21T23:08:38.104341Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8fr57z9ta0eskh1jhzzzgc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkxZDc1NmMtNjdhYTI1NDQtNTU4ZmQzMTYtYWE1NDY3YTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2024-11-21T23:08:38.594431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd8fr5r3c0ybkqy8qq3jfbhf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZlMzk0OTQtOTg0MTEzYi0yMmIyMTgxMS1kMmJjZjc5Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2024-11-21T23:08:38.705430Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd8fr5tmbqvv9j2jhgb0h4wx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwNTI1ZTYtYzg4ZGUwYzctY2U5ZjFlYmUtZWZlN2E2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2024-11-21T23:08:38.799282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd8fr5y00b8mr36g1gy4skg3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwNTI1ZTYtYzg4ZGUwYzctY2U5ZjFlYmUtZWZlN2E2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2024-11-21T23:08:38.870867Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDkwNTI1ZTYtYzg4ZGUwYzctY2U5ZjFlYmUtZWZlN2E2YWE=, ActorId: [1:930:2747], ActorState: ExecuteState, TraceId: 01jd8fr60k08nr780gmn6kqcna, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2024-11-21T23:08:38.883433Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd8fr60k08nr780gmn6kqcna, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwNTI1ZTYtYzg4ZGUwYzctY2U5ZjFlYmUtZWZlN2E2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:43.325543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:43.325736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:08:43.325912Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029a6/r3tmp/tmp9LXqZh/pdisk_1.dat 2024-11-21T23:08:43.614112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:43.652308Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:43.700467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:43.700650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:43.715740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:43.834030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:43.856922Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:43.858369Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:43.858926Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:630:2536] 2024-11-21T23:08:43.859199Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:43.904292Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:622:2530], Recipient [2:630:2536]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:43.905014Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:43.905111Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:43.907353Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:08:43.907453Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:08:43.907549Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:08:43.907930Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:43.908003Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:08:43.908117Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:43.908225Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:647:2545] 2024-11-21T23:08:43.908270Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:08:43.908338Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:08:43.908388Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:43.908830Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:630:2536], Recipient [2:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:43.908892Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:43.909463Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:08:43.909559Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:08:43.909652Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:638:2540], Recipient [2:630:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:43.909694Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:43.909753Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# [0:0:0] 2024-11-21T23:08:43.909851Z node 2 :TX_DAT ... :984:2745], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:08:45.844826Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:45.845882Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:630:2536]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T23:08:45.846003Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:718:2598]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 2 Status: STATUS_NOT_FOUND 2024-11-21T23:08:45.857136Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T23:08:45.857311Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:630:2536], Recipient [2:718:2598]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T23:08:45.857349Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:45.857414Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2024-11-21T23:08:45.857588Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T23:08:45.857670Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:718:2598], Recipient [2:630:2536]: {TEvReadSet step# 3001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T23:08:45.857711Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:08:45.857743Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2024-11-21T23:08:46.832298Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. 
Ctx: { TraceId: 01jd8frd9w1thtd4pf04vnb5cv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTQ0MTI5MGQtYzVhMGRjMWQtMzMxZjhiMmQtZjI0M2JiNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:46.838438Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1051:2841], Recipient [2:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 KeysSize: 1 2024-11-21T23:08:46.838591Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:08:46.838702Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2024-11-21T23:08:46.838848Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:08:46.838907Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:08:46.838958Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:46.839005Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:08:46.839063Z node 2 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2024-11-21T23:08:46.839117Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:08:46.839151Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:46.839177Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:08:46.839202Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:08:46.839345Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T23:08:46.839628Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T23:08:46.839694Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2024-11-21T23:08:46.839749Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1051:2841], 0} after executionsCount# 1 2024-11-21T23:08:46.839811Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1051:2841], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:08:46.839907Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1051:2841], 0} finished in read 2024-11-21T23:08:46.840008Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:08:46.840038Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 
executing on unit ExecuteRead 2024-11-21T23:08:46.840070Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:08:46.840101Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:08:46.840155Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:08:46.840184Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:08:46.840216Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2024-11-21T23:08:46.840266Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:08:46.840376Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T23:08:46.840794Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:630:2536]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2024-11-21T23:08:46.841006Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1053:2842], Recipient [2:718:2598]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 KeysSize: 1 2024-11-21T23:08:46.841077Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T23:08:46.841130Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2024-11-21T23:08:46.841190Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T23:08:46.841218Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2024-11-21T23:08:46.841244Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:46.841271Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T23:08:46.841311Z node 2 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2024-11-21T23:08:46.841342Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T23:08:46.841368Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:46.841394Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2024-11-21T23:08:46.841418Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2024-11-21T23:08:46.841508Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 } 2024-11-21T23:08:46.841715Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 
2024-11-21T23:08:46.841757Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v4000/18446744073709551615 2024-11-21T23:08:46.841788Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1053:2842], 0} after executionsCount# 1 2024-11-21T23:08:46.841824Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1053:2842], 0} sends rowCount# 1, bytes# 24, quota rows left# 1000, quota bytes left# 5242856, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:08:46.841876Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1053:2842], 0} finished in read 2024-11-21T23:08:46.841927Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T23:08:46.841953Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T23:08:46.841975Z node 2 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:08:46.842003Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:08:46.842039Z node 2 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2024-11-21T23:08:46.842062Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:08:46.842087Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2024-11-21T23:08:46.842113Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T23:08:46.842174Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T23:08:46.842718Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:54:2101], Recipient [2:718:2598]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2024-11-21T23:08:46.843485Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1051:2841], Recipient [2:630:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T23:08:46.843550Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2024-11-21T23:08:46.844900Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1053:2842], Recipient [2:718:2598]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T23:08:46.844948Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2024-11-21T23:07:51.809555Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1732230471809516 2024-11-21T23:07:52.929109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873229622499376:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:52.929556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:53.055294Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873232683503057:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:53.055346Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:53.415731Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:53.427500Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001334/r3tmp/tmpZJdVad/pdisk_1.dat 2024-11-21T23:07:54.008470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:54.067101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:54.067149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:54.067263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:54.067625Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:54.068118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:54.068164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:54.089522Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:07:54.089669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:54.094853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11363, node 1 2024-11-21T23:07:54.378036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001334/r3tmp/yandex0dg0Li.tmp 2024-11-21T23:07:54.378068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001334/r3tmp/yandex0dg0Li.tmp 2024-11-21T23:07:54.378224Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001334/r3tmp/yandex0dg0Li.tmp 2024-11-21T23:07:54.378325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:07:54.499285Z INFO: TTestServer started on Port 2949 GrpcPort 11363 TClient is connected to server localhost:2949 PQClient connected to localhost:11363 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:55.193999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:07:55.362621Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:57.922554Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873229622499376:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:57.922653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:58.057025Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873232683503057:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:58.057096Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:58.980393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873254158339872:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:58.980492Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873254158339860:2283], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:58.980652Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:58.987477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:07:59.034706Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873254158339875:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:07:59.420200Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.266309s 2024-11-21T23:07:59.420254Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.266382s 2024-11-21T23:07:59.578884Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873259687271431:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:59.581139Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzdjOGE0OTYtODJjMTgwN2EtMzk4MzcxZDctOWUxNDA2NTU=, ActorId: [1:7439873259687271396:2305], ActorState: ExecuteState, TraceId: 01jd8fpzhh19yd8pdh3c6gp219, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:59.587376Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:59.596246Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873258453307210:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:59.598192Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjA1ZGFmZjUtZGIwODVlMWUtNjc3MTRhNzAtYTFkMGRhZmU=, ActorId: [2:7439873254158339858:2282], ActorState: ExecuteState, TraceId: 01jd8fpz3x9pgh66hj49gd3v0y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:59.600680Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:59.635583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:07:59.987620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:08:00.272716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:11363", true, true, 1000); 2024-11-21T23:08:01.141877Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8fq0rva0b0j2t3cq3kyjpq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY3NzkzZGYtYTg4ZTMwOTQtMThiNTQ0OTQtNDcxZTZmNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439873268277206447:3000] === CheckClustersList. Ok 2024-11-21T23:08:06.532272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:1 ... 21T23:08:45.447657Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:20167 2024-11-21T23:08:45.495043Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T23:08:45.497010Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T23:08:45.497050Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T23:08:45.500596Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T23:08:45.500776Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:45824 2024-11-21T23:08:45.500815Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:45824 proto=v1 topic=test-topic durationSec=0 2024-11-21T23:08:45.500828Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:08:45.502843Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T23:08:45.502987Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T23:08:45.503000Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T23:08:45.503011Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T23:08:45.503032Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873457923878055:2471] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T23:08:45.505830Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873457923878055:2471] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T23:08:45.728163Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873457923878055:2471] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T23:08:45.739497Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439873457923878097:2471] connected; active server actors: 1 2024-11-21T23:08:45.746187Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873457923878055:2471] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T23:08:45.746227Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873457923878055:2471] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T23:08:45.749054Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439873457923878097:2471] disconnected; active server actors: 1 
2024-11-21T23:08:45.749093Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439873457923878097:2471] disconnected no session 2024-11-21T23:08:45.922045Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873457923878055:2471] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T23:08:45.922103Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873457923878055:2471] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T23:08:45.922124Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873457923878055:2471] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:08:45.922158Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:08:45.922750Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 5, Generation: 1 2024-11-21T23:08:45.922772Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:45.922797Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7439873457923878122:2471], now have 1 active actors on pipe 2024-11-21T23:08:45.922817Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:08:45.922835Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:08:45.922912Z node 5 :PERSQUEUE INFO: new Cookie src|ec74b26f-423a8739-c119f49a-93227df3_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:08:45.923124Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:08:45.923171Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:08:45.923387Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:08:45.923409Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:08:45.923469Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:08:45.923569Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|ec74b26f-423a8739-c119f49a-93227df3_0 2024-11-21T23:08:45.924341Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732230525924 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:08:45.924453Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ec74b26f-423a8739-c119f49a-93227df3_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:08:45.926017Z :INFO: [] MessageGroupId [src] SessionId [src|ec74b26f-423a8739-c119f49a-93227df3_0] Write session: close. 
Timeout = 0 ms 2024-11-21T23:08:45.926059Z :INFO: [] MessageGroupId [src] SessionId [src|ec74b26f-423a8739-c119f49a-93227df3_0] Write session will now close 2024-11-21T23:08:45.926096Z :DEBUG: [] MessageGroupId [src] SessionId [src|ec74b26f-423a8739-c119f49a-93227df3_0] Write session: aborting 2024-11-21T23:08:45.926600Z :INFO: [] MessageGroupId [src] SessionId [src|ec74b26f-423a8739-c119f49a-93227df3_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:08:45.926637Z :DEBUG: [] MessageGroupId [src] SessionId [src|ec74b26f-423a8739-c119f49a-93227df3_0] Write session: destroy 2024-11-21T23:08:45.927460Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|ec74b26f-423a8739-c119f49a-93227df3_0 grpc read done: success: 0 data: 2024-11-21T23:08:45.927484Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec74b26f-423a8739-c119f49a-93227df3_0 grpc read failed 2024-11-21T23:08:45.927501Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec74b26f-423a8739-c119f49a-93227df3_0 grpc closed 2024-11-21T23:08:45.927516Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|ec74b26f-423a8739-c119f49a-93227df3_0 is DEAD 2024-11-21T23:08:45.928519Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:08:45.929989Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:08:45.930025Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439873457923878122:2471] destroyed 2024-11-21T23:08:45.930064Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:08:45.984198Z :INFO: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] Starting read session 2024-11-21T23:08:45.984267Z :DEBUG: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] Starting session to cluster null (localhost:20167) 2024-11-21T23:08:45.986434Z :DEBUG: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:45.986515Z :DEBUG: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:45.986568Z :DEBUG: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T23:08:45.988300Z :ERROR: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2024-11-21T23:08:45.988369Z :DEBUG: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:45.988401Z :DEBUG: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:45.988560Z :INFO: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2024-11-21T23:08:45.988752Z :NOTICE: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:08:45.988789Z :DEBUG: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2024-11-21T23:08:45.988892Z :INFO: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] Closing read session. Close timeout: 0.000000s 2024-11-21T23:08:45.988931Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:08:45.988967Z :INFO: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:08:45.989066Z :NOTICE: [/Root] [/Root] [8dbeeb3d-ffff3b0f-f9fd7753-e3fe1ad1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: Got : 24000 2106439 49449 9 9 Expected: 24000 2106439 49449 9 9 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 49449 9 9 Expected: 12816 1121048 49449 9 9 Got : 24000 3547100 81694 9 9 Expected: 24000 3547100 81694 9 9 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), [23701,23794) } Got : 9582 1425282 81694 9 9 Expected: 9582 1425282 81694 9 9 Got : 24000 2460139 23760 9 9 Expected: 24000 2460139 23760 9 9 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060767 23760 9 9 Expected: 10440 1060767 23760 9 9 Got : 24000 4054050 46562 9 9 Expected: 24000 4054050 46562 9 9 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2273213 46562 9 9 Expected: 13570 2273213 46562 9 9 Got : 24000 2106459 49449 9 9 Expected: 24000 2106459 49449 9 9 Got : 24000 2460219 23555 9 9 Expected: 24000 2460219 23555 9 9 Got : 24000 4054270 46543 9 9 Expected: 24000 4054270 46543 9 9 Got : 24000 2106439 25272 38 44 Expected: 24000 2106439 25272 38 44 { [2455,2599), [2798,3624), [4540,4713), [5654,7161), [8509,8794), [8936,9973), [11888,14280), [14337,14882), [15507,16365), [17368,19451), [19536,20135), [20790,21503), [21589,23243) } Got : 12816 1121048 25272 20 23 Expected: 12816 1121048 25272 20 23 Got : 24000 3547100 49916 64 44 Expected: 24000 3547100 49916 64 44 { [1012,1475), [1682,1985), [2727,3553), [3599,3992), [5397,7244), [9181,9807), [9993,10178), [12209,14029), [15089,15342), [16198,16984), [17238,18436), [21087,21876), [23701,23794) } Got : 9582 1425198 49916 26 17 Expected: 9582 1425198 49916 26 17 Got : 24000 2460139 13170 42 41 Expected: 24000 2460139 13170 42 41 { [1296,2520), [3888,4320), [5040,6840), [6912,7272), [10872,11160), [11520,12096), [12096,13824), [15192,15624), [17064,17856), [18216,19296), [19800,20160), [20736,21096), [21096,22104) } Got : 10440 1060798 13170 18 18 Expected: 10440 1060798 13170 18 18 Got : 24000 4054050 29361 68 43 
Expected: 24000 4054050 29361 68 43 { [460,1518), [2300,2484), [2760,4002), [4600,5842), [6302,9752), [11178,12328), [14582,14858), [16790,18032), [18216,18446), [18722,19504), [19504,19964), [20378,20470), [21344,23506) } Got : 13570 2277890 29361 38 24 Expected: 13570 2277890 29361 38 24 Got : 24000 2106459 25428 38 44 Expected: 24000 2106459 25428 38 44 Got : 24000 2460219 13482 41 41 Expected: 24000 2460219 13482 41 41 Got : 24000 4054270 29970 67 43 Expected: 24000 4054270 29970 67 43 Got : 24000 2106479 25458 38 44 Expected: 24000 2106479 25458 38 44 Got : 24000 2460259 13528 42 41 Expected: 24000 2460259 13528 42 41 Got : 24000 4054290 30013 67 43 Expected: 24000 4054290 30013 67 43 1 parts: 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 0% bytes, 4 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 205073 (actual 205115 - 0% error) 14% (actual 14%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 17416844 (actual 17420850 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24056 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48061 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72061 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96085 - 0% error) 10% (actual 10%) key = (400354, 133459) value = 120047 (actual 120093 - 0% error) 10% (actual 10%) key = (480133, 160052) value = 144053 (actual 144100 - 0% error) 10% (actual 10%) key = (560080, 186701) value = 168060 (actual 168102 - 0% error) 10% (actual 10%) key = (639892, 213305) value = 192073 (actual 192119 - 0% error) 10% (actual 10%) key = (719776, 239933) value = 216090 (actual 216137 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2042645 - 0% error) 10% (actual 10%) key = (159427, 53150) value = 4076220 (actual 4080259 - 0% error) 10% (actual 10%) key = (239872, 79965) value = 6113940 (actual 6117932 - 0% error) 10% (actual 10%) key = (319834, 106619) value = 8152983 
(actual 8156951 - 0% error) 10% (actual 10%) key = (400105, 133376) value = 10190566 (actual 10194584 - 0% error) 10% (actual 10%) key = (479833, 159952) value = 12228261 (actual 12232212 - 0% error) 10% (actual 10%) key = (559774, 186599) value = 14265925 (actual 14269984 - 0% error) 10% (actual 10%) key = (639385, 213136) value = 16304923 (actual 16308915 - 0% error) 10% (actual 10%) key = (719437, 239820) value = 18342658 (actual 18346641 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 51 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 217180 (actual 217228 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 18443184 (actual 18447186 - 0% error) 9% (actual 9%) { [12965,17271), [20685,27602), [31405,43682), [58051,73731), [81074,85635), [86559,89297), [92588,112654), [134937,148111), [152568,158136), [169526,171272), [181381,184364), [188301,199001), [201179,227534) } 1 parts: 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 3% bytes, 111 pages RowCountHistogram: 6% (actual 6%) key = (80152, 26725) value = 7654 (actual 7700 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 21908 (actual 21959 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 37729 (actual 37776 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 44561 (actual 44610 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 62406 (actual 62455 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 70269 (actual 70314 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 83950 (actual 83996 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 96207 (actual 96256 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 110645 (actual 110694 - 0% error) 12% (actual 12%) DataSizeHistogram: 6% (actual 6%) key = (80152, 26725) value = 650681 (actual 654673 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 1862907 (actual 1866988 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 3200081 (actual 3204123 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 3780473 (actual 3784554 - 0% error) 14% (actual 
14%) key = (361831, 120618) value = 5294670 (actual 5298760 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 5965285 (actual 5969310 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 7125413 (actual 7129406 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 8166922 (actual 8170966 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 9391370 (actual 9395383 - 0% error) 12% (actual 12%) { [12965,17271), [20685,27602), [31405,43682), [58051,73731), [81074,85635), [86559,89297), [92588,112654), [134937,148111), [152568,158136), [169526,171272), [181381,184364), [188301,199001), [201179,227534) } Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (109672, 36565) value = 12716 (actual 12760 - 0% error) 10% (actual 10%) key = (200011, 66678) value = 25439 (actual 25485 - 0% error) 10% (actual 10%) key = (242497, 80840) value = 38151 (actual 38197 - 0% error) 10% (actual 10%) key = (323278, 107767) value = 50861 (actual 50910 - 0% error) 9% (actual 9%) key = (365755, 121926) value = 63568 (actual 63614 - 0% error) 10% (actual 10%) key = (482191, 160738) value = 76283 (actual 76335 - 0% error) 10% (actual 9%) key = (610882, 203635) value = 88992 (actual 89039 - 0% error) 10% (actual 10%) key = (673702, 224575) value = 101722 (actual 101768 - 0% error) 10% (actual 10%) key = (715753, 238592) value = 114435 (actual 114484 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (109522, 36515) value = 1078779 (actual 1082732 - 0% error) 10% (actual 10%) key = (199786, 66603) value = 2157298 (actual 2161219 - 0% error) ... (307549, NULL) (307615, NULL) (307678, NULL) (307744, NULL) 100 rows, 100 pages, 4 levels: (307810, NULL) (307876, NULL) (307939, NULL) (308005, NULL) (308065, NULL) 100 rows, 100 pages, 4 levels: (308131, NULL) (308194, NULL) (308260, NULL) (308320, NULL) (308386, NULL) 100 rows, 100 pages, 4 levels: (308452, NULL) (308518, NULL) (308587, NULL) (308650, NULL) (308719, NULL) 100 rows, 100 pages, 4 levels: (308779, NULL) (308842, NULL) (308908, NULL) (308974, NULL) (309049, NULL) 100 rows, 100 pages, 4 levels: (309115, NULL) (309181, NULL) (309247, NULL) (309319, NULL) (309385, NULL) 100 rows, 100 pages, 4 levels: (309448, NULL) (309511, NULL) (309580, NULL) (309649, NULL) (309715, NULL) 100 rows, 100 pages, 4 levels: (309775, NULL) (309850, NULL) (309922, NULL) (309994, NULL) (310060, NULL) 100 rows, 100 pages, 4 levels: (310132, NULL) (310195, NULL) (310264, NULL) (310327, NULL) (310396, NULL) 100 rows, 100 pages, 4 levels: (310465, NULL) (310534, NULL) (310594, NULL) (310660, NULL) (310726, NULL) 100 rows, 100 pages, 4 levels: (310801, NULL) (310867, NULL) (310945, NULL) (311011, NULL) (311077, NULL) 100 rows, 100 pages, 4 levels: (311140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) 
(313699, NULL) (313768, NULL) 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) 100 rows, 100 pages, 4 
levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) (330253, NULL) (330322, NULL) (330382, NULL) 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = 
(16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> TKesusTest::TestAcquireDowngrade [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2024-11-21T23:08:39.210270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:08:39.212901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:08:39.213013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00298b/r3tmp/tmp9KZiwL/pdisk_1.dat 2024-11-21T23:08:39.695010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:08:39.766266Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:39.822767Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:39.824354Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:08:39.824671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:08:39.824811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:08:39.839997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:08:39.977135Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:08:39.977212Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:08:39.977405Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T23:08:40.147505Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:08:40.148157Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:08:40.148297Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:08:40.148710Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:08:40.148944Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:08:40.149050Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T23:08:40.149387Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:08:40.151109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:08:40.153287Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:08:40.153397Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:08:40.190921Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:40.192373Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:40.192943Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:638:2540] 2024-11-21T23:08:40.193232Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:40.207779Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:638:2540]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:40.248562Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:40.250116Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:40.250339Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:40.252224Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:08:40.252312Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:08:40.252373Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:08:40.252870Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:40.280557Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:08:40.280828Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:40.280968Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:666:2557] 2024-11-21T23:08:40.281017Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:08:40.281070Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:08:40.281111Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:40.281240Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:624:2532], Recipient [1:641:2542]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:40.281720Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:641:2542] 2024-11-21T23:08:40.281936Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:40.294646Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:624:2532], Recipient [1:641:2542]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:40.295489Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:638:2540], Recipient [1:638:2540]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:40.295565Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:40.296085Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:08:40.296191Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:08:40.296647Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:08:40.296719Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:40.296785Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:08:40.296837Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:08:40.296875Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:08:40.296915Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:08:40.296971Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:08:40.297188Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:653:2548], Recipient [1:638:2540]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:40.297231Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:40.297293Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:630:2536], serverId# [1:653:2548], sessionId# [0:0:0] 2024-11-21T23:08:40.297813Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:653:2548] 2024-11-21T23:08:40.297867Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:40.298004Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:08:40.298274Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:08:40.298343Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:08:40.298466Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:08:40.298511Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:08:40.298573Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:08:40.298613Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:08:40.298649Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:08:40.298938Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:40.298995Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit 
StoreSchemeTx 2024-11-21T23:08:40.299032Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:08:40.299081Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:08:40.299140Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:08:40.299178Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:08:40.299218Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:08:40.299253Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:08:40.299281Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:40.299901Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:40.300104Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:40.301460Z node 1 :TX_DATASHARD DEBUG: LoadChangeRec ... 2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T23:08:48.959305Z node 2 :KQP_EXECUTER INFO: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T23:08:48.959348Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T23:08:48.959395Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T23:08:48.959443Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T23:08:48.959485Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. 
Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T23:08:48.959884Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:931:2745], Recipient [2:888:2712]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:48.959939Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:48.959987Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:930:2744], serverId# [2:931:2745], sessionId# [0:0:0] 2024-11-21T23:08:48.960246Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:927:2728], Recipient [2:888:2712]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 927 RawX2: 8589937320 } TxBody: " \0018\000`\200\200\200\005j\244\006\010\001\022\223\006\010\001\022\024\n\022\t\237\003\000\000\000\000\000\000\021\250\n\000\000\002\000\000\000\032\257\002\010\240\215\006\022\210\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000 2024-11-21T23:08:48.960279Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:48.960413Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:888:2712], Recipient [2:888:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:08:48.960448Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:08:48.960515Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:08:48.960956Z node 2 :TX_DATASHARD TRACE: TxId: 281474976715662, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2024-11-21T23:08:48.961051Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2024-11-21T23:08:48.961120Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2024-11-21T23:08:48.961410Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CheckDataTx 2024-11-21T23:08:48.961477Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T23:08:48.961523Z node 2 :TX_DATASHARD TRACE: Advance execution 
plan for [0:281474976715662] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T23:08:48.961560Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:48.961596Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:08:48.961636Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T23:08:48.961697Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715662] at 72075186224037888 2024-11-21T23:08:48.961740Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T23:08:48.961776Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:48.961806Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T23:08:48.961827Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T23:08:48.961878Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T23:08:48.961934Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715662] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191936 2024-11-21T23:08:48.962161Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T23:08:48.962216Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:08:48.962245Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T23:08:48.962281Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:08:48.962314Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2024-11-21T23:08:48.962408Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:48.962443Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:08:48.962495Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:08:48.962527Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:08:48.962569Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2024-11-21T23:08:48.962594Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:08:48.962624Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2024-11-21T23:08:48.974101Z node 2 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:08:48.974190Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2024-11-21T23:08:48.974251Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T23:08:48.974336Z node 2 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2024-11-21T23:08:48.974433Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:48.975382Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [2:24:2071], Recipient [2:888:2712]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 2001} 2024-11-21T23:08:48.975430Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2024-11-21T23:08:48.975475Z node 2 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2024-11-21T23:08:48.975529Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:08:48.975608Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T23:08:48.975742Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:08:48.975809Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T23:08:48.975874Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:927:2728] TxId: 281474976715662. Ctx: { TraceId: 01jd8frfvkc4pnvn9tmjjza63p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmFmMDI1YWYtMTE1N2Q4MjItNTRiMzY4MDQtNTQwMTYzZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true >> TSchemeShardTTLTestsWithReboots::MoveTable |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:49.775666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:49.775775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:49.775819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:49.775852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:49.775893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:49.775919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:49.775989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:49.776323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:49.862640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:49.862695Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:49.887381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:49.890302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:49.890525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:49.898637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:49.899474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:49.900110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:49.900390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 2024-11-21T23:08:49.908419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:49.909720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:49.909779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:49.909998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:49.910060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:49.910109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:49.910208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.918745Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:50.049929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:50.050145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.050359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:50.050604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:50.050663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.054364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.054518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:50.054758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.054816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:50.054857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:50.054891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:50.057279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.057342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:50.057376Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:50.059269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.059322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.059388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.059435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.063567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:50.065507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:50.065682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:50.066755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.066873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:50.066916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.067134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:50.067187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.067361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:50.067425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:50.071164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:50.071203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:50.071364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:50.071402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:50.071703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T23:08:50.071805Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:50.071916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:50.071946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.071996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:50.072034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.072076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:50.072113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:50.072184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:50.072227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:50.072259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:50.073908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:50.074047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:50.074086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:50.074122Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:50.074155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:50.074240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:08:50.421739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2024-11-21T23:08:50.422106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.422202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:50.422255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:08:50.422510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T23:08:50.422624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:08:50.426924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:50.426966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:08:50.427215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:50.427272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:08:50.427771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.427834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:08:50.428354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:50.428455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:08:50.428498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:08:50.428571Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T23:08:50.428609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:08:50.428680Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T23:08:50.434705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:08:50.448324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1286 } } 2024-11-21T23:08:50.448386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:50.448509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1286 } } 2024-11-21T23:08:50.448604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1286 } } 2024-11-21T23:08:50.449227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:50.449305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:50.449453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:50.449505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:50.449582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:08:50.449645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.449683Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.449715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:50.449749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:08:50.452453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.452783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.453199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.453260Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:08:50.453367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:08:50.453398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:50.453437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:08:50.453500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 102 2024-11-21T23:08:50.453592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:08:50.453636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:08:50.453665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:08:50.453767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:50.455250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:50.455294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:498:2426] TestWaitNotification: OK eventTxId 102 2024-11-21T23:08:50.455797Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:50.456066Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 252us result status StatusSuccess 2024-11-21T23:08:50.456521Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } 
IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:50.148473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:50.148605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:50.148645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:50.148678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:50.148722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:50.148747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:50.148814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:50.149164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:50.230454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:50.230525Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:50.242844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:50.247204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:50.247411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:50.259227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:50.266897Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Clear TempDirsState with owners number: 0 2024-11-21T23:08:50.267512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.267887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:50.295207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:50.296351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:50.296405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:50.296575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:50.296636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:50.296666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:50.296746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.311467Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:50.468111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:50.468368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.468612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:50.468852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:50.468912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.471454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.471590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:50.471812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.471881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:50.471934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:50.471968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
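This block belongs to TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType: further down the schemeshard rejects ESchemeOpCreateColumnTable proposals whose TtlSettings point at a String column ("Unsupported column type") or a DyNumber column, while the row-table describe output earlier shows TTL accepted on a Timestamp column with ExpireAfterSeconds: 3600. As a hedged reference point, here is that accepted row-table shape expressed in YQL through the same Python SDK sketch (connection details are placeholders; this negative test targets column tables, which additionally use STORE = COLUMN, and it builds the proposal in-process rather than over a client connection).

    import ydb

    # Placeholder endpoint and database; not part of the unit test itself.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/MyRoot")
    driver.wait(timeout=5)

    # TTL must reference a date/time column such as Timestamp;
    # Interval("PT1H") corresponds to ExpireAfterSeconds: 3600 in the trace.
    ddl = """
        CREATE TABLE `/MyRoot/TTLEnabledTable` (
            key Uint64,
            modified_at Timestamp,
            PRIMARY KEY (key)
        )
        WITH (TTL = Interval("PT1H") ON modified_at);
    """

    pool = ydb.SessionPool(driver)
    try:
        pool.retry_operation_sync(lambda session: session.execute_scheme(ddl))
    finally:
        pool.stop()
        driver.stop()
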
2024-11-21T23:08:50.475256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.475321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:50.475357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:50.479725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.479797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.479854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.479910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.483756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:50.489577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:50.489873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:50.490936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.491085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:50.491130Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.491400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:50.491465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.491649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:50.491736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:50.495440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:50.495494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:50.495694Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:50.495756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:50.496128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.496228Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:50.496324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:50.496354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.496411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:50.496452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.496486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:50.496511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:50.496593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:50.496630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:50.496660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:50.498306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:50.498461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:50.498507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:50.498538Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:50.498580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:50.498689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:08:50.511565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:08:50.512175Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:08:50.513478Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:08:50.537508Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:08:50.540058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" 
Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:50.540431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.540850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2024-11-21T23:08:50.542188Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:50.547522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:50.547716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2024-11-21T23:08:50.548286Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T23:08:50.551015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:50.551374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.551579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2024-11-21T23:08:50.553781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:50.553909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] 
recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:51.109826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:51.109926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.109977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:51.110013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:51.110052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:51.110079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:51.110146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.110489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:51.173377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:51.173424Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:51.192807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:51.196709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:51.196892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:51.209955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:51.210700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:51.211261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.211500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:51.215730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.216525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:51.216581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.216693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:51.216740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:51.216774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:51.216848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.221335Z node 1 :HIVE INFO: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:51.338218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:51.338484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.338729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:51.338944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:51.339005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.342904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.343081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:51.343329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.343413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:51.343467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:51.343534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:51.346065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.346125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:51.346159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:51.347777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.347825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.347890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.347933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.351358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:51.353423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:51.353578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:51.354302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.354437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:51.354481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.354704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:51.354761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.354932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:51.355006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:51.359272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:51.359318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:51.359468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.359503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:51.359845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.359945Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:51.360060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:51.360089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.360142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:51.360183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.360217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:51.360243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:51.360304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:51.360337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:51.360366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:51.362086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:51.362190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:51.362228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:51.362264Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:51.362301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:51.362432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:08:51.368880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:08:51.369396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:08:51.370599Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:08:51.385663Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:08:51.388073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:51.388403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.388503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2024-11-21T23:08:51.388945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2024-11-21T23:08:51.390154Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:51.399116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:51.399320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: 
txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T23:08:51.399815Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:50.430449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:50.430561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:50.430617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:50.430649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:50.430692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:50.430719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:50.430787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:50.431097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:50.515355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:50.515408Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:50.536861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:50.540921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:50.541123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:50.558324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:50.559081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:50.559664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.559942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:50.564633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:50.565881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:50.565936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:50.566146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:50.566219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:50.566264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:50.566362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.574583Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:50.703185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:50.703391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.703602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:50.703824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:50.703878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.708708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.708840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:50.709063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.709125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:50.709179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:50.709216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:50.711977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.712047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:50.712079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:50.713958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.714024Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.714094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.714146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.723545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:50.726301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:50.726538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:50.727529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:50.727666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:50.727711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.727990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:50.728058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:50.728226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:50.728297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:50.731735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:50.731780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:50.731968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:50.732007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:50.732324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:50.732445Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TDone opId# 1:0 ProgressState 2024-11-21T23:08:50.732562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:50.732596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.732651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:50.732698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:50.732728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:50.732773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:50.732845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:50.732880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:50.732912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:50.735714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:50.735849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:50.735889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:50.735931Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:50.735970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:50.736090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T23:08:51.412592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:08:51.412749Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:51.412840Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T23:08:51.413080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T23:08:51.413125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T23:08:51.413159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T23:08:51.413293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.413383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:51.413423Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T23:08:51.413560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T23:08:51.417062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.417108Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T23:08:51.417185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T23:08:51.417211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready 
parts: 1/1 2024-11-21T23:08:51.417262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T23:08:51.417318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710760 2024-11-21T23:08:51.417366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T23:08:51.417396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T23:08:51.417426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T23:08:51.417490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T23:08:51.422570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T23:08:51.422647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T23:08:51.422706Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T23:08:51.422826Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:51.427261Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:51.427376Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, 
read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:51.427446Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T23:08:51.429091Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:51.429172Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:51.429223Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T23:08:51.429329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:51.429394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:464:2428] TestWaitNotification: OK eventTxId 102 2024-11-21T23:08:51.429887Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:51.430178Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 275us result status StatusSuccess 2024-11-21T23:08:51.430629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 
2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> TSchemeShardTTLTests::BuildIndexShouldSucceed |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:51.906806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:51.906916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.906964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:51.907003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:51.907055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:51.907086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:51.907163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.907507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:51.989491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2024-11-21T23:08:51.989556Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:52.003268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:52.007457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:52.007677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:52.021402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:52.022223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:52.022945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:52.023275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:52.028327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:52.029689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:52.029755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:52.030001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:52.030067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:52.030117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:52.030219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.042037Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:52.184125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:52.184405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.184681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:52.184946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:52.185013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.188060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:52.188213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2024-11-21T23:08:52.188457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.188523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:52.188590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:52.188629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:52.191096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.191160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:52.191220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:52.194501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.194561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.194630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:52.194684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:52.199836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:52.203657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:52.203926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:52.204963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:52.205137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:52.205183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:52.205455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:52.205522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:52.205695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:52.205781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:52.209812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:52.209860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:52.210093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:52.210153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:52.210541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.210654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:52.210768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:52.210805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:52.210864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:52.210909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:52.210949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:52.210981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:52.211058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:52.211101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:52.211133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:52.223063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:52.223244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:52.223291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:52.223441Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:52.223491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:52.223638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:08:52.232121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:08:52.232774Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:08:52.234203Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:08:52.258525Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:08:52.261223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:52.261613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.262032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2024-11-21T23:08:52.263289Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:52.269362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:52.269562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2024-11-21T23:08:52.270370Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:43.669761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:43.669871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:43.669927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:43.669966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:43.670013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2024-11-21T23:08:43.670042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:43.670109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:43.673954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:43.749472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:43.749537Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:43.761802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:43.764373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:43.764555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:43.771420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:43.772269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:43.772992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.773358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:43.778363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.779490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:43.779539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.779681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:43.779734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:43.779769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:43.779849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.786092Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:43.951903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:43.952196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.952453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:43.952726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:43.952791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.958164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.958361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:43.958723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.958788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:43.958834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:43.958879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:43.963936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.964028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:43.964068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:43.969679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.969806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.969873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.969932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.974044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:43.977588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:43.977807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:43.978929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:43.979092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:43.979147Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.979418Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:43.979494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:43.979677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:43.979779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:43.982195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:43.982246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:43.982455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:43.982510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:43.982951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:43.983061Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:43.983149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:43.983176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.983222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:43.983257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:43.983291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:43.983353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:43.983471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:43.983516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:43.983557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:43.985483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:43.985608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:43.985653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:43.985692Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:43.985737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:43.985866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 944 2024-11-21T23:08:51.587617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.587749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.587832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.587974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.588064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.588162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.588279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.588395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.595812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.595983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.596068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.596154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.596308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.596422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.596469Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T23:08:51.596613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T23:08:51.596657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:08:51.596741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T23:08:51.596828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2617:3883] message: TxId: 103 2024-11-21T23:08:51.596877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:08:51.596950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:08:51.596983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:08:51.597877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2024-11-21T23:08:51.603053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:08:51.603111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [1:3989:5189] TestWaitNotification: OK eventTxId 103 2024-11-21T23:08:51.603796Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:51.604142Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 378us result status StatusSuccess 2024-11-21T23:08:51.604925Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: ColumnCodecPlain } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 
ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2024-11-21T23:08:51.607677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:51.607823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.608283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2024-11-21T23:08:51.610412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:51.610575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, 
wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T23:08:51.610972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T23:08:51.611055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T23:08:51.611581Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:08:51.611760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:08:51.611792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4412:5611] TestWaitNotification: OK eventTxId 104 >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:52.631530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:52.631643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:52.631686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:52.631723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:52.631772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:52.631801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:52.631876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:52.632222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:52.704016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:52.704070Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:52.717546Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:52.722047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:52.722245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:52.729030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:52.729779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:52.730319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:52.730616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:52.734856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:52.735959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:52.736013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:52.736210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:52.736261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:52.736302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:52.736373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.743439Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:52.865534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:52.865795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.866030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:52.866258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:52.866318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.868672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:52.868810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:52.869033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T23:08:52.869089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:52.869132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:52.869167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:52.871406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.871481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:52.871528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:52.873357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.873415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.873468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:52.873518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:52.877309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:52.879581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:52.879799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:52.880823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:52.880952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:52.881006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:52.881227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:52.881280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:52.881466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:52.881560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2024-11-21T23:08:52.884204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:52.884254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:52.884452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:52.884499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:52.884851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.884991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:52.885086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:52.885120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:52.885174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:52.885216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:52.885244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:52.885273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:52.885339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:52.885376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:52.885406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:52.887324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:52.887446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:52.887486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:52.887525Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:52.887566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:52.887678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:08:52.890595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:08:52.891189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:08:52.892335Z node 1 :TX_PROXY 
DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:08:52.909583Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:08:52.911984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:52.912343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:52.912440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2024-11-21T23:08:52.912869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2024-11-21T23:08:52.913785Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:52.918792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:52.918985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T23:08:52.919661Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:52.957813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:52.957901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:52.957948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 
100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:52.957978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:52.958017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:52.958063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:52.958127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:52.958444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:53.032292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:53.032347Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:53.045943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:53.050112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:53.050318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:53.057295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:53.057959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:53.058694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.059011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:53.063373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.064716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:53.064778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.064978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:53.065035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:53.065101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:53.065205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.077397Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:53.280789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:53.281082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.281331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2024-11-21T23:08:53.281587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:53.281651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.286075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.286214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:53.286515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.286589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:53.286640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:53.286683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:53.288976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.289048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:53.289135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:53.291075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.291139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.291195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.291258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.295287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:53.297639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:53.297865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:53.298973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.299126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:53.299174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.299482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:53.299556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.299751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:53.299903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:53.302957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:53.303004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:53.303232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.303287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:53.303686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.303807Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:53.303909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:53.303970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.304045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:53.304087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.304131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:53.304165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:53.304377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:53.304412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:53.304449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:53.306207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:53.306323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:53.306370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
2024-11-21T23:08:53.306431Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:53.306472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:53.306592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... tedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:53.627218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 984 } } 2024-11-21T23:08:53.627303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 984 } } 2024-11-21T23:08:53.628313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.628359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T23:08:53.628543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.628624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:53.628704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.628778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.628823Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:08:53.628863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:53.628917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T23:08:53.630281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.630318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, 
TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:53.633008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.633120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:53.633210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 320 RawX2: 4294969600 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.633264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.633296Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.633328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:53.633363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T23:08:53.641501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:08:53.641689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:08:53.641776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:08:53.647358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:08:53.647676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:08:53.647914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.648045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:08:53.648320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:08:53.648364Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T23:08:53.648471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T23:08:53.648536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T23:08:53.648596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T23:08:53.648977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.649796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.649841Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TDone opId# 101:0 ProgressState 2024-11-21T23:08:53.649911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T23:08:53.649940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T23:08:53.649972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T23:08:53.650041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T23:08:53.650121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T23:08:53.650182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:08:53.650248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:08:53.650575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:53.650643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T23:08:53.650667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T23:08:53.650708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:08:53.650730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T23:08:53.650767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T23:08:53.650822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:08:53.654730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:08:53.654800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK eventTxId 101 2024-11-21T23:08:53.655318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:53.655599Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 269us result status StatusSuccess 2024-11-21T23:08:53.656110Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { 
Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:53.047378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:53.047480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:53.047518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:53.047554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:53.047598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:53.047637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:53.047708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:53.048019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:53.127580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:53.127640Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2024-11-21T23:08:53.149518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:53.153900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:53.154163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:53.167246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:53.168184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:53.168858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.169215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:53.190457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.191821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:53.191889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.192090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:53.192159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:53.192212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:53.192314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.202741Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:53.341007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:53.341258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.341500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:53.341725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:53.341792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.348767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.348903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:53.349095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.349153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:53.349191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:53.349225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:53.351302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.351356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:53.351396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:53.352861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.352917Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.352978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.353023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.356866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:53.358773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:53.358997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:53.360045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.360187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:53.360238Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.360498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:53.360574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.360752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:53.360828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:53.363296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:53.363343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:53.363524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.363563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:53.363922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.364023Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:53.364121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:53.364163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.364217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:53.364254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.364284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:53.364314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:53.364379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:53.364411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:53.364437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:53.366169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:53.366281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:53.366322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:53.366359Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:53.366415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:53.366531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
elatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:53.694733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2609 } } 2024-11-21T23:08:53.694817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2609 } } 2024-11-21T23:08:53.695580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.695627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T23:08:53.695808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.695861Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:08:53.695955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.696027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.696065Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:08:53.696098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:08:53.696136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T23:08:53.696651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.696685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T23:08:53.696784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.696815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at 
tablet: 72057594046678944 2024-11-21T23:08:53.696870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:08:53.696918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.696952Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.696981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:08:53.697010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T23:08:53.701811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:08:53.701905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:08:53.701942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:08:53.704453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:08:53.704622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:08:53.704709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.704795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:08:53.705029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.705214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:08:53.705254Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T23:08:53.705376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T23:08:53.705430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T23:08:53.705482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T23:08:53.705674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.705709Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:08:53.705758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T23:08:53.705781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T23:08:53.705820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T23:08:53.705887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T23:08:53.705926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T23:08:53.705974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:08:53.706007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:08:53.706143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:53.706189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T23:08:53.706209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T23:08:53.706234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:08:53.706253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T23:08:53.706270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T23:08:53.706324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:08:53.708560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:08:53.708615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK eventTxId 101 2024-11-21T23:08:53.709115Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:53.709373Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 269us result status StatusSuccess 2024-11-21T23:08:53.709877Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:53.529977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:53.530071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:53.530111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:53.530142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:53.530189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:53.530219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:53.530283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:53.530648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:53.604780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:53.604839Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:53.616765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:53.620673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:53.620852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:53.626455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:53.627081Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Clear TempDirsState with owners number: 0 2024-11-21T23:08:53.627704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.627959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:53.634339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.635395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:53.635442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.635641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:53.635704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:53.635775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:53.635867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.642200Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:53.748282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:53.748485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.748678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:53.748853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:53.748902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.752482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.752640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:53.752928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.752992Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:53.753043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:53.753083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 
2024-11-21T23:08:53.757285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.757355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:53.757396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:53.760191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.760251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.760314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.760363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.763947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:53.765841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:53.766008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:53.766915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.767043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:53.767083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.767294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:53.767359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.767525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:53.767610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:53.770816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:53.770859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:53.770993Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.771027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:53.771307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.771408Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:53.771509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:53.771552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.771607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:53.771655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.771689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:53.771717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:53.771775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:53.771812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:53.771843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:53.773670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:53.773780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:53.773817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:53.773855Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:53.773891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:53.773981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:54.438311Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:54.438661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T23:08:54.438790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:08:54.438971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T23:08:54.439005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T23:08:54.439045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T23:08:54.440643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:54.440788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:54.440861Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T23:08:54.440909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T23:08:54.451922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.451993Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T23:08:54.452111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T23:08:54.452154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
281474976710760 ready parts: 1/1 2024-11-21T23:08:54.452198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2024-11-21T23:08:54.452272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710760 2024-11-21T23:08:54.452324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T23:08:54.452355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T23:08:54.452406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T23:08:54.452525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T23:08:54.464333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T23:08:54.464441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T23:08:54.464567Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T23:08:54.464665Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:54.466838Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:54.466946Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, 
upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:54.467015Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T23:08:54.474985Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:08:54.475109Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:377:2351], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:08:54.475150Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T23:08:54.475293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:08:54.475332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:464:2428] TestWaitNotification: OK eventTxId 102 2024-11-21T23:08:54.476013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:54.476419Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 391us result status StatusSuccess 2024-11-21T23:08:54.476943Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" 
SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestDeleteTablet >> DataShardTxOrder::ZigZag_oo [GOOD] >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |80.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteTabletWithFollowers |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2024-11-21T23:08:31.989036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:31.989096Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:31.989191Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:32.001818Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:32.002329Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T23:08:32.005864Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 
2024-11-21T23:08:32.017446Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:32.058615Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:32.059176Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:32.060830Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T23:08:32.060938Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T23:08:32.061012Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T23:08:32.061349Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:32.101987Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T23:08:32.102205Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:32.102376Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T23:08:32.102488Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T23:08:32.102540Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T23:08:32.102583Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:32.102963Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.103029Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.103169Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T23:08:32.103295Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T23:08:32.103537Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:32.103602Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:32.103648Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T23:08:32.103686Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:32.103722Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:32.103758Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T23:08:32.103802Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:32.150715Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:32.150789Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:32.150844Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T23:08:32.153760Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 
8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T23:08:32.153841Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:32.153939Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T23:08:32.154092Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T23:08:32.154137Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T23:08:32.154197Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T23:08:32.154242Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:32.154280Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T23:08:32.154319Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T23:08:32.154351Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:32.154687Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:32.154731Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T23:08:32.154766Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T23:08:32.154801Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:32.154866Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T23:08:32.154919Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T23:08:32.154959Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T23:08:32.154996Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:32.155035Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:32.184421Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T23:08:32.184515Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:32.184572Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:32.184618Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T23:08:32.184703Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:32.185256Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:32.185323Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:32.185368Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T23:08:32.185502Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 
MediatorId# 0 TabletID 9437184} 2024-11-21T23:08:32.185537Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:32.185681Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:32.185722Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:32.185768Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T23:08:32.185804Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T23:08:32.194167Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T23:08:32.194251Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:32.194519Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.194565Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.194622Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:32.194667Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:32.194708Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:32.194765Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T23:08:32.194816Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T23:08:32.194875Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:32.194909Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:32.194948Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:32.194985Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:32.195169Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T23:08:32.195207Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:32.195235Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:32.195258Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T23:08:32.195282Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T23:08:32.195344Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:32.195384Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T23:08:32.195431Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:32.195465Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:32.195534Z node 1 :TX_DATASHARD TRACE: Operation 
[1000001:1] is the new logically complete end at 9437184 2024-11-21T23:08:32.195573Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T23:08:32.195611Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T23:08:32.195661Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:32.195695Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:32.195733Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... aitInRS 2024-11-21T23:08:55.477658Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:55.477687Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2024-11-21T23:08:55.477712Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2024-11-21T23:08:55.477739Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2024-11-21T23:08:55.478266Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2024-11-21T23:08:55.478369Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T23:08:55.478461Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:55.478491Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2024-11-21T23:08:55.478522Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2024-11-21T23:08:55.478553Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T23:08:55.478808Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2024-11-21T23:08:55.478852Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2024-11-21T23:08:55.478910Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2024-11-21T23:08:55.478955Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2024-11-21T23:08:55.478998Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2024-11-21T23:08:55.479026Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2024-11-21T23:08:55.479058Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2024-11-21T23:08:55.479112Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:55.479165Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T23:08:55.479217Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T23:08:55.479269Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T23:08:55.479560Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender 
[6:228:2223], Recipient [6:228:2223]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:55.479603Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:55.479656Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:55.479691Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:55.479723Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:55.479758Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2024-11-21T23:08:55.479788Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2024-11-21T23:08:55.479822Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.479851Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:55.479878Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:55.479907Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:55.480632Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2024-11-21T23:08:55.480680Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.480708Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:55.480735Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2024-11-21T23:08:55.480764Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2024-11-21T23:08:55.480804Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.480830Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2024-11-21T23:08:55.480855Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:55.480879Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:55.480931Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2024-11-21T23:08:55.480963Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2024-11-21T23:08:55.481000Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2024-11-21T23:08:55.481046Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.481074Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:55.481097Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2024-11-21T23:08:55.481123Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2024-11-21T23:08:55.481173Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.481197Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2024-11-21T23:08:55.481222Z 
node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2024-11-21T23:08:55.481250Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2024-11-21T23:08:55.481286Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.481310Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2024-11-21T23:08:55.481334Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2024-11-21T23:08:55.481359Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2024-11-21T23:08:55.481387Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.481411Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2024-11-21T23:08:55.481433Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2024-11-21T23:08:55.481458Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2024-11-21T23:08:55.481481Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.481503Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T23:08:55.481525Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2024-11-21T23:08:55.481552Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2024-11-21T23:08:55.481926Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2024-11-21T23:08:55.481982Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T23:08:55.482029Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.482052Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2024-11-21T23:08:55.482077Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2024-11-21T23:08:55.482105Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T23:08:55.482309Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2024-11-21T23:08:55.482342Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2024-11-21T23:08:55.482374Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2024-11-21T23:08:55.482429Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2024-11-21T23:08:55.482469Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2024-11-21T23:08:55.482493Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2024-11-21T23:08:55.482519Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 
2024-11-21T23:08:55.482555Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:55.482586Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:55.482616Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:55.482647Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:55.496500Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2024-11-21T23:08:55.496631Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2024-11-21T23:08:55.496723Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:55.496788Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2024-11-21T23:08:55.496874Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:08:55.496942Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:55.497274Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2024-11-21T23:08:55.497303Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2024-11-21T23:08:55.497337Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T23:08:55.497361Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2024-11-21T23:08:55.497393Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:08:55.497416Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:49.021948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:49.022080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:49.022118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:49.022151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:49.022197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:49.022225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2024-11-21T23:08:49.022312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:49.022704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:49.109090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:49.109167Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:49.122948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:49.127252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:49.127462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:49.134310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:49.135048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:49.135662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:49.135974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:49.140772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:49.142084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:49.142145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:49.142342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:49.142444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:49.142496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:49.142597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.149869Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:49.295814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:49.296073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.296305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:49.296522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:49.296599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.302027Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:49.302183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:49.302422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.302597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:49.302642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:49.302677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:49.307160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.307262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:49.307335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:49.309906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.309966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.310022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:49.310092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:49.313575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:49.318511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:49.318691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:49.319486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:49.319610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:49.319650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:49.319847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:49.319906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:49.320052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:49.320121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:49.321876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:49.321915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:49.322088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:49.322132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:49.322553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:49.322663Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:49.322768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:49.322802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:49.322861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:49.322905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:49.322937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:49.322971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:49.323042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:49.323078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:49.323109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:49.330637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:49.330814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:49.330859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:49.330901Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:49.330957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:49.331114Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & r ... 691793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.691916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.692018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.692116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.692244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.693373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.693492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.693584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.693657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.693722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.693793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.693863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.693989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.697764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.697921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.698015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.698082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.698194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.698241Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:08:55.698374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:08:55.698440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:08:55.698493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T23:08:55.698582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2615:3881] message: TxId: 101 2024-11-21T23:08:55.698628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:08:55.698718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:08:55.698759Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:08:55.699775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2024-11-21T23:08:55.700481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.707922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:08:55.707987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2616:3882] TestWaitNotification: OK eventTxId 101 2024-11-21T23:08:55.708700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:08:55.709003Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 344us result status StatusSuccess 2024-11-21T23:08:55.709734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false Serializer { ClassName: "ARROW_SERIALIZER" ArrowCompression { Codec: ColumnCodecPlain } } StorageId: "" DefaultValue { } 
ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" ColumnCodec: ColumnCodecPlain } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T23:08:55.715677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:55.715873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:08:55.716469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2024-11-21T23:08:55.719186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:55.719345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TPersQueueTest::EventBatching [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestFollowers |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T23:07:40.368716Z :ReadSession INFO: Random seed for debugging is 1732230460368678 2024-11-21T23:07:41.293917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873181838525888:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:41.293962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:41.416373Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873181175953399:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:41.416417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0013c8/r3tmp/tmpMt52Fj/pdisk_1.dat 2024-11-21T23:07:41.751159Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:41.775195Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:42.455496Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:42.495931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:42.510218Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:42.532668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:42.532786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:42.533216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:42.533264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:42.556312Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:07:42.556486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:42.559667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19216, node 1 2024-11-21T23:07:42.918273Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0013c8/r3tmp/yandexw7gecL.tmp 2024-11-21T23:07:42.918311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0013c8/r3tmp/yandexw7gecL.tmp 2024-11-21T23:07:42.918521Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0013c8/r3tmp/yandexw7gecL.tmp 2024-11-21T23:07:42.918658Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:07:43.078616Z INFO: TTestServer started on Port 17867 GrpcPort 19216 TClient is connected to server localhost:17867 PQClient connected to localhost:19216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:43.732489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2024-11-21T23:07:46.294727Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873181838525888:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:46.294798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:46.416610Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873181175953399:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:46.416663Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:47.181026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873206945757276:2283], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:47.181145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:47.181988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873206945757288:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:47.205527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T23:07:47.269255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873206945757290:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T23:07:47.717221Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.123014s 2024-11-21T23:07:47.717265Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.123318s 2024-11-21T23:07:47.804778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:07:47.811599Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873206945757327:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:47.810282Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873207608330538:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:47.812988Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWUxODVjYi1mZTI5MjU5MC03ZDhlOWE4NC01ZGZjOGU3ZA==, ActorId: [2:7439873206945757274:2282], ActorState: ExecuteState, TraceId: 01jd8fpkkde0e1aakxc3s962j8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:47.812204Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQyZDVkNS02Y2MwMzlkOC1jZTU5MzBlZS0zMmYxOGVkYw==, ActorId: [1:7439873207608330498:2305], ActorState: ExecuteState, TraceId: 01jd8fpkrjfvgpgfvkhfkk674z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:47.814257Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:47.815169Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:48.147128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:07:48.536795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:19216", true, true, 1000); 2024-11-21T23:07:49.327733Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8fpn58e9v7eqnhqtgmhav8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc3ZjRiNy1kMThjYWFkMS1hMTQ4OGY0Mi0zODIwZGY3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439873216198265565:2988] === CheckClustersList. Ok 2024-11-21T23:07:56.281744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:19216 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: 2024-11-21T23:07:56.647466Z node 1 :PERSQUEUE INFO: proxy ... inited, cookie 3 from offset3 2024-11-21T23:08:55.746037Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 3551852a-9dd5fd97-1dc9f760-d7a0760e has messages 1 2024-11-21T23:08:55.746192Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 read done: guid# 3551852a-9dd5fd97-1dc9f760-d7a0760e, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2024-11-21T23:08:55.746235Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 response to read: guid# 3551852a-9dd5fd97-1dc9f760-d7a0760e 2024-11-21T23:08:55.746599Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 Process answer. Aval parts: 0 2024-11-21T23:08:55.749990Z :DEBUG: [/Root] [/Root] [3cbe9c94-c5d44da7-96d1183e-6b521c84] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:55.750271Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T23:08:55.750490Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T23:08:55.750581Z :DEBUG: [/Root] [/Root] [3cbe9c94-c5d44da7-96d1183e-6b521c84] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T23:08:54.591000Z WriteTime: 2024-11-21T23:08:54.596000Z Ip: "ipv6:[::1]:39334" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:39334" } } } } 2024-11-21T23:08:55.750787Z :INFO: [/Root] [/Root] [3cbe9c94-c5d44da7-96d1183e-6b521c84] Closing read session. Close timeout: 3.000000s 2024-11-21T23:08:55.750843Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T23:08:55.750892Z :INFO: [/Root] [/Root] [3cbe9c94-c5d44da7-96d1183e-6b521c84] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1671 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:08:55.750526Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T23:08:55.750672Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 got read request: guid# 932adf43-bbe8d887-ebf851d5-e0aac6d4 2024-11-21T23:08:55.751587Z :INFO: [/Root] [/Root] [3cbe9c94-c5d44da7-96d1183e-6b521c84] Closing read session. 
Close timeout: 0.000000s 2024-11-21T23:08:55.751645Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T23:08:55.751694Z :INFO: [/Root] [/Root] [3cbe9c94-c5d44da7-96d1183e-6b521c84] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1672 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:08:55.751832Z :NOTICE: [/Root] [/Root] [3cbe9c94-c5d44da7-96d1183e-6b521c84] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:08:55.753253Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 grpc read done: success# 0, data# { } 2024-11-21T23:08:55.753292Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 grpc read failed 2024-11-21T23:08:55.753328Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 grpc closed 2024-11-21T23:08:55.753389Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_12224531079145186697_v1 is DEAD 2024-11-21T23:08:55.755337Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:08:55.755390Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_12224531079145186697_v1 2024-11-21T23:08:55.755438Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7439873493277121577:2502] destroyed 2024-11-21T23:08:55.755402Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439873493277121572:2499] disconnected; active server actors: 1 2024-11-21T23:08:55.755438Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439873493277121572:2499] client user disconnected session shared/user_7_1_12224531079145186697_v1 2024-11-21T23:08:55.755495Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_12224531079145186697_v1 2024-11-21T23:08:55.851451Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:08:55.851490Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:57.715090Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.715166Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.715209Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:08:57.715630Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:08:57.716305Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:08:57.716533Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.716985Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T23:08:57.718549Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.718580Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.718614Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:08:57.718901Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:08:57.719314Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:08:57.719507Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.719778Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. 
Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:08:57.721037Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:08:57.722659Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T23:08:57.722788Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T23:08:57.723028Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:08:57.723078Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:08:57.723111Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:08:57.723173Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T23:08:57.730233Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.730274Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.730337Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:08:57.730739Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:08:57.732356Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:08:57.732553Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.732828Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:08:57.733627Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.733839Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:08:57.734024Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:08:57.734102Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:08:57.734229Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2024-11-21T23:08:57.736417Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.736475Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.736522Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:08:57.736895Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:08:57.737336Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:08:57.737463Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.738454Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:08:57.738660Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:08:57.738743Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:08:57.738850Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: Took 11.598288 seconds |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowerPromotion |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:59.081854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:59.082010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:59.082079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:59.082138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:59.082193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2024-11-21T23:08:59.082223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:59.082275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:59.082599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:59.151288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:59.151375Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:59.164055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:59.168138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:59.168348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:59.178856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:59.179492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:59.180236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:59.180516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:59.185076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:59.186522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:59.186589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:59.186797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:59.186851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:59.186904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:59.187011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.194457Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:59.336492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:59.336789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.337020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:59.337247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:59.337314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.346643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:59.346753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:59.346908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.346961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:59.347037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:59.347067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:59.352479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.352552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:59.352624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:59.355548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.355608Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.355663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:59.355716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:59.365296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:59.367344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:59.367553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:59.368629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:59.368751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:59.368811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T23:08:59.369060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:59.369109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:59.369283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:59.369373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:59.371444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:59.371492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:59.371664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:59.371703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:59.372133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.372179Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:59.372275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:59.372312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:59.372352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:59.372409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:59.372447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:59.372474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:59.372530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:59.372582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:59.372617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:59.375144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:59.375250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:59.375296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:59.375354Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:59.375392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:59.375529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:08:59.378960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:08:59.379497Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:08:59.384318Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:08:59.400681Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:08:59.403253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:59.403636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2024-11-21T23:08:59.403763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name: __ydb_parent, at schemeshard: 72057594046678944 2024-11-21T23:08:59.403798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name: __ydb_parent, at schemeshard: 72057594046678944 2024-11-21T23:08:59.404940Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:08:59.413070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name: __ydb_parent" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:59.413236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name: __ydb_parent, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2024-11-21T23:08:59.413730Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2024-11-21T23:08:59.416949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" 
KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:59.417338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2024-11-21T23:08:59.417479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2024-11-21T23:08:59.417517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2024-11-21T23:08:59.425413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:59.425583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] >> TAsyncIndexTests::OnlineBuild |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TAsyncIndexTests::Decimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2024-11-21T23:08:27.091699Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:27.091825Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:27.113237Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:27.113365Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:27.146451Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:27.147062Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=11352324288245816971, session=0, seqNo=0) 2024-11-21T23:08:27.147239Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:27.163924Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=11352324288245816971, session=1) 2024-11-21T23:08:27.164297Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=7722420011768068077, session=0, seqNo=0) 2024-11-21T23:08:27.164437Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T23:08:27.180425Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=7722420011768068077, session=2) 2024-11-21T23:08:27.181286Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T23:08:27.181474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T23:08:27.181565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T23:08:27.181790Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock2" count=1) 2024-11-21T23:08:27.181880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2024-11-21T23:08:27.181949Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2024-11-21T23:08:27.182052Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=1, semaphore="Lock2" count=1) 2024-11-21T23:08:27.182115Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2024-11-21T23:08:27.194924Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2024-11-21T23:08:27.195084Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2024-11-21T23:08:27.195115Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333) 2024-11-21T23:08:27.195968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=15040902052590774393, name="Lock1") 2024-11-21T23:08:27.196075Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=15040902052590774393) 2024-11-21T23:08:27.196679Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=18050129653526933488, name="Lock2") 2024-11-21T23:08:27.196766Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=18050129653526933488) 2024-11-21T23:08:27.216715Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:27.216844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:27.217392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:27.217992Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:27.275931Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:27.276102Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T23:08:27.276162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing 
semaphore 2 "Lock2" queue: next order #2 session 2 2024-11-21T23:08:27.276189Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2024-11-21T23:08:27.276578Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:189:2203], cookie=1679347122671429109, name="Lock1") 2024-11-21T23:08:27.276667Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:189:2203], cookie=1679347122671429109) 2024-11-21T23:08:27.277243Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:197:2210], cookie=14884951869807014748, name="Lock2") 2024-11-21T23:08:27.277312Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:197:2210], cookie=14884951869807014748) 2024-11-21T23:08:27.681646Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:27.698971Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:27.994738Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:28.013152Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:28.293870Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:28.319063Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:28.596608Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:28.612722Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:28.905267Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:28.925222Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.185142Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.207477Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.482676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.495345Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.748389Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.766721Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.036723Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.059118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.376260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.395935Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.707946Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.723193Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.023662Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.046426Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.329681Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.343229Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.621006Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.635264Z node 1 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.960899Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.973178Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.266758Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.287445Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.561986Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.577963Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.862950Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.877538Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.153120Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.172715Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.509890Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.538206Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.842610Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.858187Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.119369Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.140787Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.426617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.439021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.717272Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.734313Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.054341Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.079193Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.360587Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.375503Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.654003Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.675145Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.962565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.985841Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.253907Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.268518Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.563442Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:363:2365], cookie=10787977353420315377, name="Lock1") 2024-11-21T23:08:36.563571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:363:2365], cookie=10787977353420315377) 2024-11-21T23:08:36.564160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:366:2368], cookie=12481248322535233132, name="Lock2") 2024-11-21T23:08:36.564234Z node 1 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:366:2368], cookie=12481248322535233132) 2024-11-21T23:08:36.623909Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.637290Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.901832Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.914919Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:37.181411Z node 1 :KESUS_TABLET DEBUG: [72 ... 24-11-21T23:08:54.256570Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:54.557985Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:54.571845Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:54.848953Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:54.864250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:55.149873Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:55.162444Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:55.426447Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:55.439561Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:55.742738Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:55.755460Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:56.024949Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:56.037007Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:56.302870Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:56.319399Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:56.602450Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:56.623275Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:56.905184Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:56.919343Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:57.229969Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:57.247479Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:57.522757Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:57.539221Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:57.812102Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:57.825738Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:58.102791Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:58.121604Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:58.390549Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:58.403180Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:58.704108Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:58.719265Z node 4 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:59.017224Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:59.037579Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:59.306063Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:59.318429Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:59.630755Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:59.654424Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:59.928870Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:59.944144Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:00.248878Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2024-11-21T23:09:00.248972Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2024-11-21T23:09:00.249026Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2024-11-21T23:09:00.249126Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2024-11-21T23:09:00.249194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2024-11-21T23:09:00.249257Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2024-11-21T23:09:00.273471Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2024-11-21T23:09:00.274289Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:329:2342], cookie=9528031506716407221, name="Lock1") 2024-11-21T23:09:00.274425Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:329:2342], cookie=9528031506716407221) 2024-11-21T23:09:00.275024Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:332:2345], cookie=11506879890565591972, name="Lock2") 2024-11-21T23:09:00.275114Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:332:2345], cookie=11506879890565591972) 2024-11-21T23:09:00.275649Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:335:2348], cookie=18307985323177326419) 2024-11-21T23:09:00.275730Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:335:2348], cookie=18307985323177326419) 2024-11-21T23:09:00.298931Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:09:00.299047Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:09:00.299697Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:09:00.300598Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:09:00.358093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:09:00.358267Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2024-11-21T23:09:00.358326Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2024-11-21T23:09:00.358780Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute 
(sender=[4:375:2378], cookie=3000455671678319146) 2024-11-21T23:09:00.358872Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:375:2378], cookie=3000455671678319146) 2024-11-21T23:09:00.359539Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:382:2384], cookie=1362312314039517135, name="Lock1") 2024-11-21T23:09:00.359619Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:382:2384], cookie=1362312314039517135) 2024-11-21T23:09:00.360198Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:385:2387], cookie=12160588699497831023, name="Lock2") 2024-11-21T23:09:00.360292Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:385:2387], cookie=12160588699497831023) 2024-11-21T23:09:00.919811Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:09:00.919939Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:09:00.939300Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:09:00.939475Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:09:00.956647Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:09:00.957226Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=1826503171536850210, session=0, seqNo=0) 2024-11-21T23:09:00.957402Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:09:00.991509Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=1826503171536850210, session=1) 2024-11-21T23:09:00.991938Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=8618756496214983617, session=0, seqNo=0) 2024-11-21T23:09:00.992121Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T23:09:01.007282Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=8618756496214983617, session=2) 2024-11-21T23:09:01.007718Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2024-11-21T23:09:01.031171Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2024-11-21T23:09:01.031885Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:143:2167], cookie=2895726583126301758, name="Sem1", limit=1) 2024-11-21T23:09:01.032067Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T23:09:01.051202Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:143:2167], cookie=2895726583126301758) 2024-11-21T23:09:01.051780Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=1, semaphore="Sem1" count=100500) 2024-11-21T23:09:01.072227Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2024-11-21T23:09:01.072699Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=1, semaphore="Sem1" count=1) 2024-11-21T23:09:01.072892Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing 
semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T23:09:01.073143Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=2, semaphore="Sem1" count=1) 2024-11-21T23:09:01.087035Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2024-11-21T23:09:01.087143Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2024-11-21T23:09:01.087846Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=12836654725997354991, name="Sem1") 2024-11-21T23:09:01.087947Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=12836654725997354991) 2024-11-21T23:09:01.088488Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=8660534686000951862, name="Sem1") 2024-11-21T23:09:01.088587Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=8660534686000951862) 2024-11-21T23:09:01.089097Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:159:2183], cookie=3973043638284759489, name="Sem1", force=0) 2024-11-21T23:09:01.103375Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:159:2183], cookie=3973043638284759489) 2024-11-21T23:09:01.105290Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:164:2188], cookie=9980010997177403698, name="Sem1", force=1) 2024-11-21T23:09:01.105423Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2024-11-21T23:09:01.126056Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:164:2188], cookie=9980010997177403698) >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowersCrossDC_Easy |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:09:01.697242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:09:01.697348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:01.697410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:09:01.697448Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:09:01.697500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:09:01.697534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:09:01.697583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:01.697923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:01.779504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:09:01.779556Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:01.789227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:01.792215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:09:01.792379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:09:01.798410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:09:01.798921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:09:01.799512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:01.799776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:01.804263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:01.805791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:01.805868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:01.806158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:01.806232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:01.806315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:01.806461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.812830Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:09:01.969260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:01.969512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.969711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:01.969917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:01.969996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.974066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:01.974179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:01.974361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.974454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:01.974500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:01.974536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:01.976449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.976503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:01.976538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:09:01.978279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.978323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.978412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:01.978467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:01.982065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:01.983815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:01.984007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:09:01.984985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:01.985095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T23:09:01.985142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:01.985396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:01.985466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:01.985622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:01.985706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:01.987648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:01.987690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:01.987812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:01.987848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:09:01.988108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.988149Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:01.988237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:01.988263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:01.988306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:01.988357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:01.988388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:01.988416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:01.988468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:01.988514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:01.988543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:09:01.990330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:01.990489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:01.990538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:09:01.990603Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:09:01.990643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:01.990770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710760, response: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T23:09:02.732419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP LOCK, path: /MyRoot/Table 2024-11-21T23:09:02.732605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2024-11-21T23:09:02.732696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2024-11-21T23:09:02.732782Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 102, cookie: 102, txId: 281474976710760, status: StatusAccepted 2024-11-21T23:09:02.732910Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2024-11-21T23:09:02.733358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.733418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2024-11-21T23:09:02.733467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2024-11-21T23:09:02.733601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:02.736481Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:09:02.736622Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:09:02.737058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2024-11-21T23:09:02.737169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:09:02.737336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2024-11-21T23:09:02.737365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2024-11-21T23:09:02.737396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2024-11-21T23:09:02.737627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:02.737736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:02.737792Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2024-11-21T23:09:02.737837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T23:09:02.746998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.747041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2024-11-21T23:09:02.747104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T23:09:02.747123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T23:09:02.747161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 
2024-11-21T23:09:02.747205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:121:2147] message: TxId: 281474976710760 2024-11-21T23:09:02.747250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2024-11-21T23:09:02.747275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T23:09:02.747292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2024-11-21T23:09:02.747347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2024-11-21T23:09:02.749891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2024-11-21T23:09:02.749952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2024-11-21T23:09:02.750022Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2024-11-21T23:09:02.750112Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:09:02.752286Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:09:02.752387Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:09:02.752449Z node 1 :BUILD_INDEX INFO: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2024-11-21T23:09:02.754134Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2024-11-21T23:09:02.754225Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:376:2350], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2024-11-21T23:09:02.754276Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2024-11-21T23:09:02.754459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:09:02.754501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:465:2429] TestWaitNotification: OK eventTxId 102 >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:09:02.551169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:09:02.551284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:02.551354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:09:02.551391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:09:02.551444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:09:02.551476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:09:02.551529Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:02.551882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:02.624226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:09:02.624264Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:02.638304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:02.642632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:09:02.642848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:09:02.660132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:09:02.663351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:09:02.664001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:02.664316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:02.669123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:02.670424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:02.670476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:02.670625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:02.670659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:02.670695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:02.670840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.676181Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:09:02.801314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:02.801583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.801807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:02.802037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:02.802100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.806355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:02.806521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:02.806739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.806802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:02.806856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:02.806891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:02.808738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.808798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:02.808833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:09:02.810561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.810607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.810676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:02.810730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:02.814297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:02.820738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:02.820936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:09:02.821886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:02.822007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:02.822056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:02.822343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:02.822416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:02.822578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:02.822728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:02.830287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:02.830349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:02.830563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:02.830607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:09:02.830945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.831014Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:02.831121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:02.831154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:02.831194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:02.831252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:02.831306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:02.831334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:02.831395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:02.831435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:02.831465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:09:02.833269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:02.833371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:02.833408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:09:02.833458Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:09:02.833507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:02.833621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
r ... 154295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 317 RawX2: 4294969598 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:03.154365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:03.154427Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:09:03.154474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:09:03.154514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2024-11-21T23:09:03.155046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:03.155076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2024-11-21T23:09:03.155219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:03.155252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:09:03.155313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:03.155359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:03.155384Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:03.155406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:09:03.155432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T23:09:03.161414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:09:03.161545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:09:03.161597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:09:03.166612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:09:03.166883Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:09:03.167280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:03.167564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:09:03.167894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:03.168266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:09:03.168317Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T23:09:03.168405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2024-11-21T23:09:03.168440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2024-11-21T23:09:03.168475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T23:09:03.168908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:03.168960Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:09:03.169024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2024-11-21T23:09:03.169050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T23:09:03.169078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T23:09:03.169129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 101 2024-11-21T23:09:03.169175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T23:09:03.169215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:09:03.169244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:09:03.169371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:09:03.169417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T23:09:03.169437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T23:09:03.169473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:09:03.169511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T23:09:03.169532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T23:09:03.169574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:09:03.172270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:09:03.172314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:383:2348] TestWaitNotification: OK 
eventTxId 101 2024-11-21T23:09:03.172854Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:09:03.173069Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 249us result status StatusSuccess 2024-11-21T23:09:03.173904Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TVectorIndexTests::CreateTableMultiColumn |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:47.385120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:47.385239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:47.386730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:47.386782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:47.386836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2024-11-21T23:08:47.386885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:47.386974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:47.387422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:47.480631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:47.480716Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:47.497332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:47.501702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:47.501858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:47.510480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:47.512586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:47.513333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:47.513680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:47.518738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:47.520081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:47.520138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:47.520383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:47.520440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:47.520491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:47.520604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.527348Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:47.660671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:47.662196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.662469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:47.662706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:47.662769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.667451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:47.667582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:47.667808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.667857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:47.667898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:47.667930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:47.669921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.669977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:47.670010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:47.671693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.671742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.671824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:47.671877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:47.682183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:47.684381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:47.684597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:47.685702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:47.685852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:47.685908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T23:08:47.686176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:47.686262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:47.686469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:47.686572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:47.688827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:47.688875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:47.689058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:47.689101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:47.689441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.689553Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:47.689654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:47.689687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:47.689745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:47.689788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:47.689828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:47.689857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:47.689940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:47.689982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:47.690022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:47.692016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:47.692134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:47.692226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:47.692270Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:47.692307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:47.692562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... hard::TEvSchemaChanged> complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.171040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.171105Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T23:08:48.171208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T23:08:48.171241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:08:48.171301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2024-11-21T23:08:48.171380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:382:2347] message: TxId: 103 2024-11-21T23:08:48.171427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:08:48.171464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:08:48.171493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:08:48.171597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:08:48.173354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:08:48.173397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:496:2460] TestWaitNotification: OK eventTxId 103 2024-11-21T23:08:52.503546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:08:52.503620Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:53.899979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.038 2024-11-21T23:08:53.900100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0919 2024-11-21T23:08:53.931040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T23:08:53.931287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T23:08:53.931377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T23:08:53.931431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:08:53.931634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T23:08:53.931675Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T23:08:53.931701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T23:08:53.943659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:08:56.610411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0241 2024-11-21T23:08:56.610605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0102 2024-11-21T23:08:56.652435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T23:08:56.652670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T23:08:56.652747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T23:08:56.652789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T23:08:56.652919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T23:08:56.652973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T23:08:56.653004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:08:56.663410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:08:59.287123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0102 2024-11-21T23:08:59.287212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0241 2024-11-21T23:08:59.322728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T23:08:59.322928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T23:08:59.322999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats 
from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T23:08:59.323040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:08:59.323159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T23:08:59.323214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T23:08:59.323249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T23:08:59.337746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:09:02.089479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0039 2024-11-21T23:09:02.089613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0068 2024-11-21T23:09:02.135218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T23:09:02.135397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T23:09:02.135471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2024-11-21T23:09:02.135517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:09:02.135626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2024-11-21T23:09:02.135663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2024-11-21T23:09:02.135698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2024-11-21T23:09:02.146077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:09:04.754765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T23:09:04.754912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:09:04.755132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase 
DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:09:04.755385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 60025000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:09:04.755977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:09:04.756440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T23:09:04.756485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:09:04.762462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:09:04.763774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:09:04.763848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.025000Z, at schemeshard: 72057594046678944 2024-11-21T23:09:04.763911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TVectorIndexTests::CreateTableMultiColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:47.061598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:47.061697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:47.061734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:47.061761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:47.061795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:47.061823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:47.061897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:47.062206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:47.131442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:47.131526Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2024-11-21T23:08:47.158218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:47.162956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:47.163146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:47.171248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:47.171863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:47.172393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:47.172679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:47.176792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:47.177969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:47.178026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:47.178220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:47.178287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:47.178333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:47.178444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.184698Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:47.293664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:47.293877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.294089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:47.294297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:47.294356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.296499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:47.296637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:47.296875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.296925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:47.296985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:47.297027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:47.298911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.298967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:47.298999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:47.300557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.300612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.300667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:47.300715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:47.304271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:47.306039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:47.306342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:47.307286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:47.307403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:47.307452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:47.307681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:47.307729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:47.307891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:47.307972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:47.310454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:47.310496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:47.310660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:47.310703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:47.311023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:47.311137Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:47.311238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:47.311268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:47.311326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:47.311370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:47.311404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:47.311434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:47.311494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:47.311533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:47.311562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:47.313778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:47.313894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:47.313932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:47.313969Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:47.314004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:47.314118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:09:05.049101Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:05.049152Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:05.049278Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:09:05.049446Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:05.049482Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:09:05.049523Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:202:2205], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T23:09:05.049917Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.049961Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:09:05.051545Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:09:05.051631Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:09:05.051662Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:09:05.051693Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:09:05.051747Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:05.052287Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:09:05.052361Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:09:05.052387Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:09:05.052414Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:09:05.052444Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:09:05.052502Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 
2024-11-21T23:09:05.053482Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 798 } } 2024-11-21T23:09:05.053522Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T23:09:05.053633Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 798 } } 2024-11-21T23:09:05.053722Z node 18 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 798 } } FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:09:05.054303Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:05.054338Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T23:09:05.054542Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:05.054586Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:09:05.054658Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 303 RawX2: 77309413619 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:05.054707Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:05.054746Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.054792Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:09:05.054833Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T23:09:05.057323Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:09:05.058784Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2024-11-21T23:09:05.058897Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.058990Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.059076Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.059112Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:09:05.059218Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:09:05.059250Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:09:05.059296Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T23:09:05.059359Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:339:2314] message: TxId: 101 2024-11-21T23:09:05.059404Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:09:05.059442Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:09:05.059473Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:09:05.059580Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:09:05.061360Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:09:05.061399Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:340:2315] TestWaitNotification: OK eventTxId 101 2024-11-21T23:09:05.061822Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:09:05.062025Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 234us result status StatusSuccess 2024-11-21T23:09:05.062483Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: 
UNIT_NANOSECONDS } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:09:05.155724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:09:05.155838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:05.155886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:09:05.155929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:09:05.155983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:09:05.156022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:09:05.156078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:05.156390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:05.231994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:09:05.232061Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:05.245184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:05.249181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:09:05.249363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:09:05.258268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:09:05.259542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:09:05.260244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:05.260580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:05.265958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:05.267350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:05.267416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:05.267617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:05.267688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:05.267733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:05.267825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.275699Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:09:05.401779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:05.402067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.402325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:05.402596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:05.402694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.405269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:05.405452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:05.405676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.405755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:05.405812Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:05.405853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:05.408363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.408426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:05.408469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:09:05.410338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.410434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.410494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:05.410542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:05.422513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:05.424259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:05.424434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:09:05.425234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:05.425337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:05.425394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:05.425632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:05.425684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:05.425818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:05.425903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:05.429689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:05.429742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:05.429941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:05.429990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:09:05.430421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:05.430481Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:05.430619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:05.430656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:05.430717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:05.430778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:05.430820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:05.430855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:05.430927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:05.430967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:05.430996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:09:05.432534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:05.432671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:05.432732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:09:05.432800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:09:05.432845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:05.432952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
sVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:05.810886Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:09:05.811158Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 278us result status StatusSuccess 2024-11-21T23:09:05.811708Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_embedding" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:05.812435Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:09:05.812723Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 260us result status StatusSuccess 2024-11-21T23:09:05.813187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 
4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T23:07:55.002460Z :ReadSession INFO: Random seed for debugging is 1732230475002424 2024-11-21T23:07:55.382643Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873241774373815:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:55.382704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:55.443851Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873241454960422:2245];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:55.446208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00102b/r3tmp/tmpPOHO5O/pdisk_1.dat 2024-11-21T23:07:55.727368Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:55.751686Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:56.400474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:56.452869Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:56.467201Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:56.469614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:56.469719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:56.479181Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:56.479247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:56.495112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:56.527426Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:07:56.528175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11975, node 1 2024-11-21T23:07:56.726038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/00102b/r3tmp/yandexFHAR5u.tmp 2024-11-21T23:07:56.726068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/00102b/r3tmp/yandexFHAR5u.tmp 2024-11-21T23:07:56.726245Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/00102b/r3tmp/yandexFHAR5u.tmp 2024-11-21T23:07:56.726351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:07:56.865319Z INFO: TTestServer started on Port 11625 GrpcPort 11975 TClient is connected to server localhost:11625 PQClient connected to localhost:11975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:57.294090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:07:59.953441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873258634829722:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:59.954445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873258634829712:2282], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:59.954895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:59.969301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:08:00.047162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873258634829726:2286], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:08:00.391228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873241774373815:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:00.438043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:00.440322Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873241454960422:2245];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:00.440399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:00.449656Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873263249211250:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:08:00.452083Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWFhNjQ5MWItZDFkOGEzYmYtNmZkZTM5ODYtYWVhNDMwNjA=, ActorId: [1:7439873258954243857:2301], ActorState: ExecuteState, TraceId: 01jd8fq03k30w7wt19d59d2k3n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:08:00.488314Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873262929797057:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:08:00.493081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:08:00.490876Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGM5NmMzOTItNzc4ZmU0NDctZmNiNTAwYjQtNTVlYjdjOWI=, ActorId: [2:7439873258634829709:2281], ActorState: ExecuteState, TraceId: 01jd8fq02b7y5b4axnzs4483xa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:08:00.590564Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:08:00.594647Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:08:01.034420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:08:01.388708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:11975", true, true, 1000); 2024-11-21T23:08:02.442969Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 424 } 2024-11-21T23:08:02.443995Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. 
Ctx: { TraceId: 01jd8fq1zrazhxb4n018n244j6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzY5NjkyOTQtYWExZDkxNjItYjI3ODE0MzUtNzAzYzhmMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:08:02.457239Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 ... 88a3c160-2bf98ba6-6c96b996-e2eaff54] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:03.014070Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2024-11-21T23:09:03.014189Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T23:09:03.014276Z :DEBUG: [/Root] [/Root] [88a3c160-2bf98ba6-6c96b996-e2eaff54] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T23:09:01.837000Z WriteTime: 2024-11-21T23:09:01.862000Z Ip: "ipv6:[::1]:52480" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:52480" } } } } 2024-11-21T23:09:03.014457Z :INFO: [/Root] [/Root] [88a3c160-2bf98ba6-6c96b996-e2eaff54] Closing read session. Close timeout: 3.000000s 2024-11-21T23:09:03.014501Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T23:09:03.014548Z :INFO: [/Root] [/Root] [88a3c160-2bf98ba6-6c96b996-e2eaff54] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1629 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:09:03.015159Z :INFO: [/Root] [/Root] [88a3c160-2bf98ba6-6c96b996-e2eaff54] Closing read session. Close timeout: 0.000000s 2024-11-21T23:09:03.015203Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T23:09:03.015243Z :INFO: [/Root] [/Root] [88a3c160-2bf98ba6-6c96b996-e2eaff54] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1630 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:09:03.015366Z :NOTICE: [/Root] [/Root] [88a3c160-2bf98ba6-6c96b996-e2eaff54] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:09:03.016444Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12929758634731591323_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T23:09:03.016591Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_12929758634731591323_v1 got read request: guid# 7a27c66a-831eab7f-e98957e7-a2d9138b 2024-11-21T23:09:03.017820Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_12929758634731591323_v1 grpc closed 2024-11-21T23:09:03.017873Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_12929758634731591323_v1 is DEAD 2024-11-21T23:09:03.022550Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439873525558005158:2502] disconnected; active server actors: 1 2024-11-21T23:09:03.022610Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7439873525558005158:2502] client user disconnected session shared/user_7_1_12929758634731591323_v1 2024-11-21T23:09:03.028094Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:09:03.028158Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_12929758634731591323_v1 2024-11-21T23:09:03.028206Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7439873525558005161:2505] destroyed 2024-11-21T23:09:03.028288Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_12929758634731591323_v1 2024-11-21T23:09:03.158127Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:09:03.158160Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:03.908382Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:7439873534147939973:2540] TxId: 281474976715696. Ctx: { TraceId: 01jd8fry0wfjg8zybzvehya131, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmRhNTMwZTktYTc4ZjQ2YzQtZDljYzBkZDctYWMxNzVkMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 8 2024-11-21T23:09:03.909839Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7439873534147939984:2548], TxId: 281474976715696, task: 4. Ctx: { SessionId : ydb://session/3?node_id=7&id=MmRhNTMwZTktYTc4ZjQ2YzQtZDljYzBkZDctYWMxNzVkMzA=. TraceId : 01jd8fry0wfjg8zybzvehya131. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [7:7439873534147939973:2540], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:09:03.909840Z node 7 :KQP_COMPUTE ERROR: SelfId: [7:7439873534147939983:2547], TxId: 281474976715696, task: 2. Ctx: { TraceId : 01jd8fry0wfjg8zybzvehya131. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=MmRhNTMwZTktYTc4ZjQ2YzQtZDljYzBkZDctYWMxNzVkMzA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [7:7439873534147939973:2540], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:09:05.223425Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.223469Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.223508Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:05.223910Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:05.224490Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:05.224718Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.225671Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T23:09:05.227607Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.227648Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.227684Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:05.228039Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:05.228515Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:05.228678Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.228994Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:05.230365Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:05.230959Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T23:09:05.231097Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T23:09:05.231289Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:05.231338Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:09:05.231366Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:09:05.231420Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T23:09:05.240517Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.240557Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.240607Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:05.241696Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:05.242649Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:05.242821Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.243108Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:05.244038Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.254752Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:05.255000Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:05.255090Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:09:05.255247Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2024-11-21T23:09:05.257395Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.257437Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.257470Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:05.257841Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:05.258245Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:05.258425Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.259227Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:05.259382Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:05.259468Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:05.259578Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes >> TestProgram::Like >> TestProgram::Like [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "suffix" } Constant { Bytes: "001" } } } Command { Assign { Column { Name: "prefix" } Constant { Bytes: "uid" } } } Command { Assign { Column { Name: "start_with" } Function { Id: 33 Arguments { Name: "string" } Arguments { Name: "prefix" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Name: "ends_with" } Function { Id: 34 Arguments { Name: "string" } Arguments { Name: "suffix" } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Name: "start_with_bool" } Function { Id: 18 Arguments { Name: "start_with" } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Name: "ends_with_bool" } Function { Id: 18 Arguments { Name: "ends_with" } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Name: "result" } Function { Id: 11 Arguments { Name: "start_with_bool" } Arguments { Name: "ends_with_bool" } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Name: "result" } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=001;column=G:suffix;};{op=Constant;const=uid;column=G:prefix;};{arguments=[G:string;G:prefix;];kernel=local_function;column=G:start_with;};{arguments=[G:string;G:suffix;];kernel=local_function;column=G:ends_with;};{op=CastBoolean;arguments=[G:start_with;];column=G:start_with_bool;};{op=CastBoolean;arguments=[G:ends_with;];column=G:ends_with_bool;};{op=And;arguments=[G:start_with_bool;G:ends_with_bool;];column=G:result;};];projections=[G:result;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow11BooleanTypeE; >> TestProgram::YqlKernelEndsWithScalar |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2024-11-21T23:08:27.191200Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:27.191372Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:27.259601Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:27.259764Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:27.295333Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:27.295968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=11639273222556710234, session=0, seqNo=0) 2024-11-21T23:08:27.296201Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:27.309105Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=11639273222556710234, session=1) 2024-11-21T23:08:27.309979Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2024-11-21T23:08:27.310175Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T23:08:27.310275Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T23:08:27.324804Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2024-11-21T23:08:27.325139Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T23:08:27.346479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2024-11-21T23:08:27.347251Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=7972142239117091615, name="Lock1") 2024-11-21T23:08:27.347362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=7972142239117091615) 2024-11-21T23:08:27.964022Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:08:27.964145Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:08:27.987544Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:08:27.987942Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2024-11-21T23:08:28.017707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:08:28.018780Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:130:2156], cookie=8618522243193746275, session=0, seqNo=0) 2024-11-21T23:08:28.018950Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:08:28.050340Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:130:2156], cookie=8618522243193746275, session=1) 2024-11-21T23:08:28.050731Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:131:2157], cookie=7933737698787033991, session=0, seqNo=0) 2024-11-21T23:08:28.050888Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T23:08:28.076864Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:131:2157], cookie=7933737698787033991, session=2) 2024-11-21T23:08:28.078135Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2024-11-21T23:08:28.078313Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2024-11-21T23:08:28.078479Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2024-11-21T23:08:28.098114Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=111) 2024-11-21T23:08:28.098579Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2024-11-21T23:08:28.098744Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2024-11-21T23:08:28.098874Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2024-11-21T23:08:28.119094Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:130:2156], cookie=112) 2024-11-21T23:08:28.119479Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2024-11-21T23:08:28.119750Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2024-11-21T23:08:28.142846Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=222) 2024-11-21T23:08:28.142936Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=223) 2024-11-21T23:08:28.143416Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], cookie=333, session=2, semaphore="Lock1" count=1) 2024-11-21T23:08:28.143804Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:131:2157], 
cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2024-11-21T23:08:28.161059Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=333) 2024-11-21T23:08:28.161154Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:131:2157], cookie=334) 2024-11-21T23:08:28.523246Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:28.539889Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:28.834784Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:28.848413Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.151929Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.193652Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.478943Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.503144Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:29.813131Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:29.831146Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.084671Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.100827Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.357704Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.375533Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.667402Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.687342Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:30.935606Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:30.951348Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.272987Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.285817Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.550672Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.566357Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:31.858868Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:31.875308Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.152680Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.165156Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.438753Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.461831Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:32.773821Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:32.796707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.071947Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.084019Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.354778Z node 2 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.371156Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.647250Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.667051Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:33.968403Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:33.990518Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.304900Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.319772Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.607205Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.631135Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:34.909872Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:34.928543Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.225880Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.244310Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.534826Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.548069Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:35.831854Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:35.851210Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.138260Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.157194Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.440682Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.455292Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:36.726727Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:36.743433Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:37.010156Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:37.023414Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:37.282900Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2024-11-21T23:08:37.283053Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2024-11-21T23:08:37.297295Z node 2 :KESUS_TABLET DEBUG: [72 ... 
024-11-21T23:08:58.648410Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:58.947717Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:58.965488Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:59.232983Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:59.248464Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:59.542649Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:59.555280Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:08:59.831005Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:08:59.848105Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:00.130809Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:00.151449Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:00.501839Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:00.518984Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:00.810838Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:00.827303Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:01.106232Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:01.123378Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:01.402839Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:01.420240Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:01.701084Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:01.721324Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:02.061376Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:02.073927Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:02.363182Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:02.377106Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:02.666017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:02.685720Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:02.961114Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:02.979384Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:03.271893Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:03.289842Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:03.586802Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:03.599859Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:03.875744Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:03.895352Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:04.177863Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2024-11-21T23:09:04.191111Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:04.449160Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:04.467183Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:04.738600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:04.751563Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:05.026848Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:05.043618Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:05.307942Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:05.323280Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:05.599884Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:05.616873Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:05.884790Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:05.902845Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:06.179773Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2024-11-21T23:09:06.199180Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2024-11-21T23:09:06.572231Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2024-11-21T23:09:06.572338Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2024-11-21T23:09:06.592334Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2024-11-21T23:09:06.614122Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:535:2531], cookie=16650732455260230675) 2024-11-21T23:09:06.614269Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:535:2531], cookie=16650732455260230675) 2024-11-21T23:09:06.614820Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:538:2534], cookie=2152144372697094607) 2024-11-21T23:09:06.614894Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:538:2534], cookie=2152144372697094607) 2024-11-21T23:09:06.615439Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:541:2537], cookie=7732181184104868335, name="Lock1") 2024-11-21T23:09:06.615521Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:541:2537], cookie=7732181184104868335) 2024-11-21T23:09:06.616022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:544:2540], cookie=12884748696658985592, name="Lock1") 2024-11-21T23:09:06.616100Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:544:2540], cookie=12884748696658985592) 2024-11-21T23:09:07.284649Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2024-11-21T23:09:07.284768Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2024-11-21T23:09:07.327344Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2024-11-21T23:09:07.327551Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
2024-11-21T23:09:07.351125Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2024-11-21T23:09:07.351656Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=14872450130837712843, session=0, seqNo=0) 2024-11-21T23:09:07.351817Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2024-11-21T23:09:07.380343Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=14872450130837712843, session=1) 2024-11-21T23:09:07.380723Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=8722597103293901268, session=0, seqNo=0) 2024-11-21T23:09:07.380868Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2024-11-21T23:09:07.403290Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=8722597103293901268, session=2) 2024-11-21T23:09:07.403724Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:130:2156], cookie=15242966921729067691, session=0, seqNo=0) 2024-11-21T23:09:07.403874Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2024-11-21T23:09:07.424523Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:130:2156], cookie=15242966921729067691, session=3) 2024-11-21T23:09:07.425287Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:143:2167], cookie=2534432864873089348, name="Sem1", limit=3) 2024-11-21T23:09:07.425462Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2024-11-21T23:09:07.442529Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:143:2167], cookie=2534432864873089348) 2024-11-21T23:09:07.442988Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=111, session=1, semaphore="Sem1" count=2) 2024-11-21T23:09:07.443162Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2024-11-21T23:09:07.443375Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=222, session=2, semaphore="Sem1" count=2) 2024-11-21T23:09:07.443634Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:130:2156], cookie=333, session=3, semaphore="Sem1" count=1) 2024-11-21T23:09:07.456377Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=111) 2024-11-21T23:09:07.456490Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=222) 2024-11-21T23:09:07.456537Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:130:2156], cookie=333) 2024-11-21T23:09:07.457286Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:152:2176], cookie=2876523117251300752, name="Sem1") 2024-11-21T23:09:07.457392Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:152:2176], cookie=2876523117251300752) 2024-11-21T23:09:07.457973Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=10388029749547274879, name="Sem1") 2024-11-21T23:09:07.458062Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], 
cookie=10388029749547274879) 2024-11-21T23:09:07.458358Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:130:2156], cookie=444, name="Sem1") 2024-11-21T23:09:07.458492Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2024-11-21T23:09:07.458572Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2024-11-21T23:09:07.459892Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2024-11-21T23:09:07.479026Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:130:2156], cookie=444) 2024-11-21T23:09:07.479810Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2184], cookie=1730897629412396611, name="Sem1") 2024-11-21T23:09:07.479907Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2184], cookie=1730897629412396611) 2024-11-21T23:09:07.480362Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2187], cookie=8366269493238774072, name="Sem1") 2024-11-21T23:09:07.480426Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2187], cookie=8366269493238774072) >> TPDiskTest::DeviceHaltTooLong [GOOD] >> TPDiskTest::ChangePDiskKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "suffix" } Constant { Bytes: "amet." } } } Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "suffix" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=amet.;column=G:suffix;};{arguments=[G:string;G:suffix;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; string: [ "Lorem ipsum dolor sit amet.", "Lorem ipsum dolor sit." 
] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "0" } Function { Id: 7 Arguments { Name: "uid" } } } } Command { Filter { Predicate { Name: "0" } } } Command { GroupBy { Aggregates { Column { Name: "1" } Function { Id: 2 } } } } Command { Projection { Columns { Name: "1" } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=IsNull;arguments=[uid;];column=G:0;};];filters=[G:0;];group_by_assignes=[{op=num_rows;arguments=[uid;];options={skip_nulls=true, min_count=1};column=G:1;};];projections=[];};{projections=[G:1;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; >> TPDiskTest::ChangePDiskKey [GOOD] >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=152;columns=1; >> TestProgram::JsonValueBinary >> TestProgram::YqlKernelEndsWith [GOOD] >> TestProgram::JsonValueBinary [GOOD] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "suffix" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { 
Name: "0" } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[G:string;G:suffix;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\001H\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\001H?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?pNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?pJson2.JsonDocumentSqlValueInt64\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Int64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\010\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\006?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p>Json2.JsonDocumentSqlValueInt64\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\024ToIntegral?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Uint64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; json_data: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_data: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3912;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=6184;portion_bytes=6184;portion_raw_bytes=5070; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=6184;before_rows=0;after_rows=100; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=6184;portion_bytes=6184;portion_raw_bytes=5070; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); >> TestProgram::YqlKernel >> TestProgram::YqlKernelEquals >> TestProgram::YqlKernel [GOOD] >> TestProgram::YqlKernelEquals [GOOD] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "sum" } Arguments { Name: "vat" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[sum;vat;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int32TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "lhs" } Arguments { Name: "rhs" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[G:lhs;G:rhs;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9FloatTypeE; lhs: [ -2, -1, 0, 1, 2 ] rhs: [ -2, -1.1, 0, 2, 2 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |81.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:19.820167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T23:08:19.820275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:19.820329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:19.820382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:19.820439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:19.820496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:19.820570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:19.820950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:19.907086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:19.907156Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:19.921151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:19.942037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:19.942282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:19.969796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:19.970575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:19.971350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:19.971736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:19.979207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:19.980797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:19.980875Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:19.981130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:19.981228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:19.981286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:19.981397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.003460Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:20.277194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T23:08:20.277562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.277884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:20.278175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:20.278251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.299814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:20.300046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:20.300342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.300420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:20.300481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:20.300529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:20.309645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.309735Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:20.309775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:20.322706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.322800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.322872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.322960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.335095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:20.343949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:20.344262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:20.345550Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:20.345737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:20.345810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.346100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:20.346183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:20.346418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:20.346530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:20.350291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:20.350350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:20.350582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:20.350631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:20.351153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:20.351217Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:20.351321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:20.351372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.351426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:20.351478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:20.351515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:20.351547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:20.351649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:20.351687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:20.351745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:20.353914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:20.354066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:20.354113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:20.354154Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:20.354192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:20.354312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ::TEvFeed: self# [1:3453:5419] 2024-11-21T23:09:10.784419Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:1451 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 09509A18-C961-46C0-981F-2E8C538A2079 amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2024-11-21T23:09:10.788387Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2024-11-21T23:09:10.788661Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T23:09:10.788901Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:1451 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C08E3724-AC19-41D0-875A-5BC20FC87B13 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2024-11-21T23:09:10.793453Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2024-11-21T23:09:10.793958Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T23:09:10.794143Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:1451 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EE40A85E-B64B-46CD-9467-FF41413BDF5E amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: 
binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2024-11-21T23:09:10.802015Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2024-11-21T23:09:10.802092Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3454:5420], success# 1, error# , multipart# 1, uploadId# 1 2024-11-21T23:09:10.811414Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3454:5420], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1
830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:1451 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 69725233-956F-4E45-9665-4170DB078F0B amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2024-11-21T23:09:10.821418Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3454:5420], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2024-11-21T23:09:10.821929Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:09:10.839695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:10.839795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:09:10.839990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:10.840108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:10.840176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:10.840224Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:10.840265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:09:10.840326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:09:10.840519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain 
is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:10.846197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:10.846959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:10.847019Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:09:10.847155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:09:10.847192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:09:10.847244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:09:10.847334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T23:09:10.847382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:09:10.847422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:09:10.847456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:09:10.847585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:09:10.853629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:09:10.853715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3439:5406] TestWaitNotification: OK eventTxId 102 >> TestProgram::SimpleFunction [GOOD] >> TestProgram::JsonValue [GOOD] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |81.1%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] >> TestProgram::JsonExists |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> TestProgram::JsonExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:21.131667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:21.131790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:21.131834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:21.131889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:21.131940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:21.131977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:21.132037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:21.132359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:21.242661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:21.242720Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:21.260102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:21.264613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:21.264839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:21.272545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:21.273176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:21.273835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:21.274157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:21.279310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:21.280937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:21.281013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:21.281239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:21.281329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:21.281415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:21.281512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.290283Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:21.711821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:21.712126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T23:08:21.712401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:21.712685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:21.712754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.727569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:21.727775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:21.728012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.728080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:21.728121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:21.728160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:21.739592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.739681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:21.739725Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:21.747418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.747507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.747578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:21.747640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:21.751518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:21.756162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:21.756419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:21.757686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T23:08:21.757855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:21.757911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:21.758197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:21.758273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:21.758485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:21.758577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:21.767143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:21.767216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:21.767418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:21.767459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:21.767929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:21.767994Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:21.768108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:21.768161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:21.768215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:21.768259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:21.768298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:21.768336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:21.768419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:21.768477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:21.768523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:21.770324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:21.781187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:21.781321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:21.781370Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:21.781425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:21.781586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... canner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T23:09:11.671272Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20756 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C0CE532E-8C83-47AA-BC28-E6F3C7DE2097 amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2024-11-21T23:09:11.687014Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2024-11-21T23:09:11.687408Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T23:09:11.687642Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20756 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 11ADFAEF-F8C5-4D4C-88BA-6C150BDE04AA amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2024-11-21T23:09:11.692158Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2024-11-21T23:09:11.692542Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T23:09:11.692677Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20756 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 81379C8E-CB71-4AB7-ACC4-884DCE59A6C2 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 
S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2024-11-21T23:09:11.696509Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3454:5420], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2024-11-21T23:09:11.696576Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3454:5420], success# 1, error# , multipart# 1, uploadId# 1 2024-11-21T23:09:11.704567Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3454:5420], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadf
c44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20756 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D3887EC5-E96F-44AF-8B1B-5347B3E5FBA1 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2024-11-21T23:09:11.718529Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3454:5420], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2024-11-21T23:09:11.719096Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:09:11.749510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:11.749600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:09:11.749789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:11.749913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:11.749993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:11.750065Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:11.750113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:09:11.750163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:09:11.750369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], 
IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:11.754799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:11.755502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:11.755585Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:09:11.755721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:09:11.755763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:09:11.755819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:09:11.755919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T23:09:11.755979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:09:11.756027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:09:11.756067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:09:11.756255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:09:11.776631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:09:11.776722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3439:5406] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Id: 8 Arguments { Name: "uid" } } } } Command { Projection { Columns { Name: "0" } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=BinaryLength;arguments=[uid;];column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10UInt64TypeE; >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogWithUnionAllAscending ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=3912;columns=5; 
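Note on the S3 exchange above: the exporter finishes the multipart upload by POSTing the collected part numbers and ETags back to S3, and the CompleteMultipartUploadResult carries the final ETag of the assembled data_00.csv object. Below is a minimal client-side sketch using aws-sdk-cpp (the SDK named in the user-agent header); the bucket name, client configuration and the trimmed part list are placeholders rather than values taken from this log.

// Sketch only: completes a multipart upload the same way the exporter does above.
#include <aws/core/Aws.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/CompleteMultipartUploadRequest.h>
#include <aws/s3/model/CompletedMultipartUpload.h>
#include <aws/s3/model/CompletedPart.h>
#include <iostream>
#include <utility>
#include <vector>

int main() {
  Aws::SDKOptions options;
  Aws::InitAPI(options);
  {
    Aws::S3::S3Client client;  // endpoint and credential setup omitted

    // Part numbers and ETags returned by the earlier UploadPart responses
    // (trimmed; the log above shows partNumber up to 101).
    std::vector<std::pair<int, Aws::String>> parts = {
        {1, "a59dd9a97cf3685e69093fb2d96653c6"},
        {2, "bdbb215613239cb3a835fee1fe7e7ca3"},
        // ... one entry per uploaded part ...
    };

    Aws::S3::Model::CompletedMultipartUpload completed;
    for (const auto& [number, etag] : parts) {
      completed.AddParts(
          Aws::S3::Model::CompletedPart().WithPartNumber(number).WithETag(etag));
    }

    Aws::S3::Model::CompleteMultipartUploadRequest request;
    request.SetBucket("backup-bucket");  // placeholder bucket name
    request.SetKey("data_00.csv");
    request.SetUploadId("1");
    request.SetMultipartUpload(std::move(completed));

    auto outcome = client.CompleteMultipartUpload(request);
    if (outcome.IsSuccess()) {
      // Corresponds to the ETag in CompleteMultipartUploadResult above.
      std::cout << "ETag: " << outcome.GetResult().GetETag() << std::endl;
    } else {
      std::cerr << outcome.GetError().GetMessage() << std::endl;
    }
  }
  Aws::ShutdownAPI(options);
  return 0;
}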
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=6184;portion_bytes=6184;portion_raw_bytes=5070; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=6184;before_rows=0;after_rows=100; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=6184;portion_bytes=6184;portion_raw_bytes=5070; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; 
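The TestProgram::SimpleFunction output above parses a two-step program: assign BinaryLength(uid) to column G:0, then project G:0. A minimal standalone sketch of the same element-wise computation, assuming Apache Arrow's binary_length compute kernel as a stand-in for YDB's internal kernel and made-up uid sample values:

// Sketch only: byte length of each value in a string column, then keep that column.
#include <arrow/api.h>
#include <arrow/compute/api.h>
#include <iostream>
#include <memory>

int main() {
  // Hypothetical sample values for the "uid" column.
  arrow::StringBuilder builder;
  if (!builder.AppendValues({"uid-1", "uid-22", ""}).ok()) return 1;
  std::shared_ptr<arrow::Array> uid;
  if (!builder.Finish(&uid).ok()) return 1;

  // Equivalent of: Assign { Function { Id: 8 Arguments { Name: "uid" } } },
  // which the parser above reports as op=BinaryLength.
  auto result = arrow::compute::CallFunction("binary_length", {uid});
  if (!result.ok()) {
    std::cerr << result.status().ToString() << std::endl;
    return 1;
  }

  // Projection { Columns { Name: "0" } } keeps only the computed column;
  // here that is the int array of byte lengths (5, 6, 0 for the samples).
  std::cout << result->make_array()->ToString() << std::endl;
  return 0;
}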
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;));); |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\001H\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\001H?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r6Json2.SqlValueConvertToUtf8\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; 
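The TestProgram::JsonValue program parsed above evaluates the JSON path $.key over each json_data row through the Json2 UDFs (Json2.Parse, Json2.CompilePath, Json2.SqlValueConvertToUtf8). A rough standalone sketch of the Utf8 case, assuming nlohmann/json in place of the Json2 UDFs; the SqlValueUtf8 helper is hypothetical and the text rendering of non-string scalars is an approximation:

// Sketch only: extract $.key as text, missing when the path does not match.
#include <nlohmann/json.hpp>
#include <iostream>
#include <optional>
#include <string>
#include <vector>

std::optional<std::string> SqlValueUtf8(const std::string& doc) {
  auto parsed = nlohmann::json::parse(doc, /*cb=*/nullptr, /*allow_exceptions=*/false);
  if (parsed.is_discarded() || !parsed.is_object() || !parsed.contains("key")) {
    return std::nullopt;  // "$.key" does not match this document
  }
  const auto& v = parsed.at("key");
  // Approximation of SqlValueConvertToUtf8: strings pass through,
  // other scalars are rendered as their JSON text.
  return v.is_string() ? v.get<std::string>() : v.dump();
}

int main() {
  // Same documents as the json_data rows in the test output.
  std::vector<std::string> json_data = {
      R"({"key":"value"})", R"({"key":10})",  R"({"key":0.1})",
      R"({"key":false})",   R"({"another":"value"})", "[]"};
  for (const auto& doc : json_data) {
    auto out = SqlValueUtf8(doc);
    std::cout << (out ? *out : "<null>") << std::endl;
  }
  return 0;
}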
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Bool FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\006\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\006?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r&Json2.SqlValueInt64\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", 
"{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Int64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\010\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\006?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r&Json2.SqlValueInt64\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\024ToIntegral?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Uint64 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9Int64TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { 
Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { 
Projection { Columns { Name: "0" } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10DoubleTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Column { Name: "json_path" } Constant { Text: "$.key" } } } Command { Assign { Function { Id: 8 Arguments { Name: "json_data" } Arguments { Name: "json_path" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: 
"O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{op=Constant;const=$.key;column=G:json_path;};{arguments=[G:json_data;G:json_path;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10StringTypeE; json_data: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; >> TestProgram::YqlKernelContains |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD] >> TPersQueueTest::PartitionsMapping |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD] Test command err: all chunk reads are received all chunk writes are received all log writes are received restart all chunk reads are received all chunk writes are received all log writes are received warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 warning: address range table at offset 0x990 has a premature terminator entry at offset 0x9a0 warning: address range table at offset 0x9c0 
has a premature terminator entry at offset 0x9d0 warning: address range table at offset 0x9f0 has a premature terminator entry at offset 0xa00 warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30 warning: address range table at offset 0xa50 has a premature terminator entry at offset 0xa60 warning: address range table at offset 0xa80 has a premature terminator entry at offset 0xa90 warning: address range table at offset 0xab0 has a premature terminator entry at offset 0xac0 warning: address range table at offset 0xae0 has a premature terminator entry at offset 0xaf0 warning: address range table at offset 0xb10 has a premature terminator entry at offset 0xb20 warning: address range table at offset 0xb40 has a premature terminator entry at offset 0xb50 warning: address range table at offset 0xb70 has a premature terminator entry at offset 0xb80 warning: address range table at offset 0xba0 has a premature terminator entry at offset 0xbb0 warning: address range table at offset 0xbd0 has a premature terminator entry at offset 0xbe0 warning: address range table at offset 0xc00 has a premature terminator entry at offset 0xc10 warning: address range table at offset 0xc30 has a premature terminator entry at offset 0xc40 warning: address range table at offset 0xc60 has a premature terminator entry at offset 0xc70 warning: address range table at offset 0xc90 has a premature terminator entry at offset 0xca0 warning: address range table at offset 0xcc0 has a premature terminator entry at offset 0xcd0 warning: address range table at offset 0xcf0 has a premature terminator entry at offset 0xd00 warning: address range table at offset 0xd20 has a premature terminator entry at offset 0xd30 ================================================================= ==9744==ERROR: LeakSanitizer: detected memory leaks Direct leak of 28336 byte(s) in 77 object(s) allocated from: #0 0x271ed0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x5e4c18e in NKikimr::NPDisk::TReqCreator::CreateLogWrite(NKikimr::NPDisk::TEvLog&, NActors::TActorId const&, double&, NWilson::TTraceId) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_req_creator.h:244:27 #2 0x5e41a96 in NKikimr::NPDisk::TPDiskActor::Handle(TAutoPtr, TDelete>&) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp:810:48 #3 0x5e3fa35 in NKikimr::NPDisk::TPDiskActor::StateOnline(TAutoPtr&) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp:1341:5 #4 0x3b86626 in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.h:533:23 #5 0x3b803f8 in NActors::TGenericExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:248:28 #6 0x3b89854 in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*)::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/library/actors/core/executor_thread.cpp:425:39 #7 0x3b8875d in NActors::TGenericExecutorThread::ProcessExecutorPool(NActors::IExecutorPool*) /-S/ydb/library/actors/core/executor_thread.cpp:479:13 #8 0x3b8b14a in NActors::TExecutorThread::ThreadProc() /-S/ydb/library/actors/core/executor_thread.cpp:510:9 #9 0x2a340c4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #10 0x26e8cc8 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 Indirect leak of 19712 byte(s) in 77 object(s) allocated from: #0 0x271ed0d in operator new(unsigned long) 
/-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x223e1e1 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:272:10 #2 0x223e1e1 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:298:10 #3 0x223e1e1 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:114:38 #4 0x223e1e1 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:55:19 #5 0x223e1e1 in __grow_by /-S/contrib/libs/cxxsupp/libcxx/include/string:2394:25 #6 0x223e1e1 in __grow_by_without_replace /-S/contrib/libs/cxxsupp/libcxx/include/string:2421:5 #7 0x223e1e1 in __append_default_init /-S/contrib/libs/cxxsupp/libcxx/include/string:2732:13 #8 0x223e1e1 in std::__y1::basic_string, std::__y1::allocator>::__resize_default_init[abi:v180000](unsigned long) /-S/contrib/libs/cxxsupp/libcxx/include/string:3245:8 #9 0x23b07f4 in resize_uninitialized /-S/contrib/libs/cxxsupp/libcxx/include/string:1207:9 #10 0x23b07f4 in ResizeUninitialized, std::__y1::allocator > /-S/util/generic/string.h:33:7 #11 0x23b07f4 in ReserveAndResize /-S/util/generic/string.h:792:9 #12 0x23b07f4 in TBasicString /-S/util/generic/string.h:519:9 #13 0x23b07f4 in Uninitialized /-S/util/generic/string.h:556:16 #14 0x23b07f4 in NKikimr::PrepareData(unsigned int, unsigned int) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp:22:20 #15 0x2223a57 in NKikimr::NTestSuiteTPDiskTest::TTestCaseAllRequestsAreAnsweredOnPDiskRestart::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:998:88 #16 0x222e9d7 in operator() /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:15:1 #17 0x222e9d7 in __invoke<(lambda at /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344:25 #18 0x222e9d7 in __call<(lambda at /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419:5 #19 0x222e9d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195:16 #20 0x222e9d7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366:12 #21 0x2ab3ac8 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519:16 #22 0x2ab3ac8 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170:12 #23 0x2ab3ac8 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #24 0x2a7b1f8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #25 0x222dba3 in NKikimr::NTestSuiteTPDiskTest::TCurrentTest::Execute() /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:15:1 #26 0x2a7cac5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #27 0x2aad67c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #28 0x7f5d2fa13d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 2464 byte(s) in 77 object(s) allocated from: #0 0x271ed0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x23b074c in Construct<> /-S/util/generic/string.h:208:17 #2 0x23b074c in TBasicString /-S/util/generic/string.h:516:14 #3 0x23b074c in Uninitialized 
/-S/util/generic/string.h:556:16 #4 0x23b074c in NKikimr::PrepareData(unsigned int, unsigned int) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp:22:20 #5 0x2223a57 in NKikimr::NTestSuiteTPDiskTest::TTestCaseAllRequestsAreAnsweredOnPDiskRestart::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:998:88 #6 0x222e9d7 in operator() /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:15:1 #7 0x222e9d7 in __invoke<(lambda at /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344:25 #8 0x222e9d7 in __call<(lambda at /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419:5 #9 0x222e9d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195:16 #10 0x222e9d7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366:12 #11 0x2ab3ac8 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519:16 #12 0x2ab3ac8 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170:12 #13 0x2ab3ac8 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #14 0x2a7b1f8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #15 0x222dba3 in NKikimr::NTestSuiteTPDiskTest::TCurrentTest::Execute() /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp:15:1 #16 0x2a7cac5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #17 0x2aad67c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #18 0x7f5d2fa13d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 40 byte(s) in 1 object(s) allocated from: #0 0x271ed0d in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x5ffa92f in NKikimr::NPDisk::TOwnerData::Reset(bool) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h:217:58 #2 0x5ff7dd4 in NKikimr::NPDisk::TPDisk::YardInitFinish(NKikimr::NPDisk::TYardInit&) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp:1901:19 #3 0x604d735 in NKikimr::NPDisk::TPDisk::ProcessYardInitSet() /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp:3347:17 #4 0x6051776 in NKikimr::NPDisk::TPDisk::Update() /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp:3595:5 #5 0x60548d5 in Exec /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_thread.h:33:19 #6 0x60548d5 in NKikimr::NPDisk::TPDiskThread::ThreadProc(void*) /-S/ydb/core/blobstorage/pdisk/blobstorage_pdisk_thread.h:27:44 #7 0x2a340c4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #8 0x26e8cc8 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 SUMMARY: AddressSanitizer: 50552 byte(s) leaked in 232 allocation(s). 
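The LeakSanitizer report above separates direct leaks (allocations whose last pointer was lost, here the log-write requests created in TReqCreator::CreateLogWrite) from indirect leaks (memory reachable only through an already leaked block, such as the string buffers allocated in PrepareData). A minimal repro sketch, unrelated to the YDB code, that produces a report of the same shape when built with clang++ -fsanitize=address -g:

// Sketch only: one lost heap object (direct leak) that owns another heap
// buffer (indirect leak). LeakSanitizer runs at process exit and classifies
// the two allocations exactly as in the report above.
#include <string>

struct Node {
  std::string payload;  // heap buffer owned by the node -> reported as indirect
};

int main() {
  Node* n = new Node{std::string(1000, 'x')};  // never deleted -> direct leak
  (void)n;  // pointer goes out of scope without delete
  return 0;
}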
|81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:486;event=parse_program;program=Command { Assign { Function { Arguments { Name: "string" } Arguments { Name: "substring" } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Name: "0" } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:499;event=program_parsed;result=[{assignes=[{arguments=[G:string;G:substring;];kernel=local_function;column=G:0;};];projections=[G:0;];};]; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow10BinaryTypeE; string: [ 4C6F72656D20697073756D20C020646F6C6F720C2073697420616D65742E, 4C6F72656D20697073756D20646F6C6F722073697420076D65742E, 4C6F72656D20697073756D20646F6C6F722073697420616D65742E, 4C6F72656D20697073756D20646F6C6F722073697420076D65742E ] substring: [ 646F6C6F72, 616D65742E, 076D65742E, 076D65742E ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:485;T=N5arrow9UInt8TypeE; >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK >> TBSV::CleanupDroppedVolumesOnRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: 
[1:106:2138] 2024-11-21T23:08:25.224598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:25.224730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:25.224777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:25.224835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:25.224890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:25.224943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:25.225023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:25.225439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:25.352446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:25.352519Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:25.368348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:25.372352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:25.372580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:25.383157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:25.384147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:25.384809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:25.385131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:25.390650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:25.392019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:25.392079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:25.392298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:25.392376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:25.392421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:25.392527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.409373Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:25.618205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:25.618551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.618831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:25.619081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:25.619147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.631389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:25.631587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:25.631838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.631907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:25.631947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:25.631983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:25.639319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.639402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:25.639440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:25.647332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.647434Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.647501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:25.647557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:25.651199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:25.655682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:25.655911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, 
tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:25.657139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:25.657297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:25.657350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:25.657646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:25.657729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:25.657931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:25.658019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:25.663824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:25.663881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:25.664071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:25.664115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:25.664538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:25.664601Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:25.664720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:25.664768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:25.664826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:25.664865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:25.664897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:25.664926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:25.664998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:25.665038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:25.665068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T23:08:25.676892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:25.677070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:25.677112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:25.677153Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:25.677193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:25.677306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 3709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:13.150433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:09:13.150577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T23:09:13.151181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:13.151315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:13.151386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:09:13.151541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T23:09:13.151832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:09:13.348505Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3454:5420], attempt# 0 2024-11-21T23:09:13.422722Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3454:5420], sender# [1:3453:5419] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:19570 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D1F61C27-2192-4B7E-AE30-9C822F1B3DCE amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2024-11-21T23:09:13.439492Z node 1 :DATASHARD_BACKUP DEBUG: 
[Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3454:5420], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:19570 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F419CCA3-2A66-4238-A969-3D76EB4C6ACB amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:09:13.456846Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3454:5420], result# PutObjectResult { ETag: 248a8c16c4235c5d1cfac1b4cbe728a2 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:09:13.459702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:13.459766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:09:13.460083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:13.460138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:09:13.461027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:13.461088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:09:13.462190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:09:13.462282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:09:13.462317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:09:13.462360Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:19570 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8A4846DE-0648-4655-80AE-F2195A6FEDCB amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2024-11-21T23:09:13.463305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:09:13.463417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T23:09:13.463693Z 
node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3454:5420], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2024-11-21T23:09:13.464275Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3453:5419] 2024-11-21T23:09:13.464850Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3454:5420], sender# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:19570 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2D4A8D97-0706-43CA-AB8D-9D56D4E8B1F3 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2024-11-21T23:09:13.470289Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3454:5420], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2024-11-21T23:09:13.470361Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3454:5420], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2024-11-21T23:09:13.470937Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3453:5419], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:09:13.481213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:09:13.510084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:13.510164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:09:13.510307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:13.510459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2024-11-21T23:09:13.510529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:13.510571Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:13.510607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 
102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:09:13.510647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:09:13.510852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:13.516498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:13.516872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:09:13.516937Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:09:13.517097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:09:13.517143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:09:13.517207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:09:13.517292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 102 2024-11-21T23:09:13.517355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:09:13.517395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:09:13.517425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:09:13.517555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:09:13.528650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:09:13.528757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3439:5406] TestWaitNotification: OK eventTxId 102 |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] >> TSchemeShardTTLTests::CheckCounters [GOOD] >> KqpQueryPerf::RangeRead-QueryService >> KqpQueryPerf::Upsert+QueryService |81.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::IndexReplace-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=33376;before_rows=0;after_rows=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=2;before_size=33376;after_size=70288;before_rows=1000;after_rows=2000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=3;before_size=70288;after_size=107232;before_rows=2000;after_rows=3000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=4;before_size=107232;after_size=144208;before_rows=3000;after_rows=4000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=5;before_size=144208;after_size=181232;before_rows=4000;after_rows=5000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;componen ... 
t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:366;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:524;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;));); |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:48.602325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:48.605085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:48.605142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:48.605181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:48.605225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:48.605253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:48.605337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, 
Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:48.605694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:48.698368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:48.698441Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:48.714720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:48.714816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:48.714960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:48.722505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:48.722720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:48.723367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:48.723578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:48.725492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:48.726890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:48.726981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:48.727104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:48.727179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:48.727261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:48.727519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.734448Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:48.868631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:48.868867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.869053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:48.869233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:48.869279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.871382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:48.871475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:48.871607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.871655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:48.871692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:48.871715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:48.873869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.873918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:48.873948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:48.875360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.875408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.875455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:48.875492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:48.878889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:48.880260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:48.880406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:48.881276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:48.881384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:48.881436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:48.881667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:48.881722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:48.881854Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:48.881941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:48.889297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:48.889338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:48.889500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:48.889543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:48.889790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:48.889829Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:48.889899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:48.889921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:48.889951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:48.889989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:48.890014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:48.890053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:48.890108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:48.890139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:48.890160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:48.891932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:48.892032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:48.892068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:48.892117Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:48.892159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:48.892268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & rem ... 
hard: 72057594046678944 2024-11-21T23:09:15.940361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T23:09:15.940538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T23:09:15.940693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T23:09:15.940747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:15.940803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 107:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:09:15.940924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T23:09:15.940962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T23:09:15.941025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T23:09:15.941143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:352:2327] message: TxId: 107 2024-11-21T23:09:15.941202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T23:09:15.941263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T23:09:15.941310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T23:09:15.941499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:09:15.941538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:09:15.942356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:09:15.942442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:09:15.942533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:15.961092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T23:09:15.961187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1398:3304] 2024-11-21T23:09:15.961489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 2024-11-21T23:09:16.094536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T23:09:16.094773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T23:09:16.095372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T23:09:16.095484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T23:09:16.095534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:09:16.095892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2024-11-21T23:09:16.095947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T23:09:16.095982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:09:16.218638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T23:09:16.218767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:09:16.218884Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:09:16.219057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1732243819064000 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:09:16.219168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1732243819064000 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2024-11-21T23:09:16.219910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T23:09:16.220214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T23:09:16.220765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T23:09:16.220815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T23:09:16.221409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2024-11-21T23:09:16.221446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T23:09:16.234349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:09:16.234566Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2024-11-21T23:09:16.237169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:09:16.237263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2024-11-22T03:50:19.064000Z, at schemeshard: 72057594046678944 2024-11-21T23:09:16.237337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:09:16.237440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:09:16.237470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2024-11-22T03:50:19.064000Z, at schemeshard: 72057594046678944 2024-11-21T23:09:16.237500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:09:16.262138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:09:16.303300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T23:09:16.303440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T23:09:16.303518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T23:09:16.303623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T23:09:16.303683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:09:16.303984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2024-11-21T23:09:16.304036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T23:09:16.304065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:09:16.387867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:09:16.435337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2024-11-21T23:09:16.435462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 
2024-11-21T23:09:16.435550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2024-11-21T23:09:16.435636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T23:09:16.435684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2024-11-21T23:09:16.435982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2024-11-21T23:09:16.436026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2024-11-21T23:09:16.436052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |81.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.3%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestExternalBoot |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:09:17.072064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:09:17.072138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:17.072163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:09:17.072216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:09:17.072253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:09:17.072272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:09:17.072316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:17.072542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:17.141236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:09:17.141293Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:17.163313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:17.167419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:09:17.167641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:09:17.174449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:09:17.175048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:09:17.175641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:17.175932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:17.183149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:17.184413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:17.184474Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:17.184706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:17.184755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:17.184791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:17.184903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.191476Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:09:17.326827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:17.327062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.327290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:17.327560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:17.327621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.330101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:17.330241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:17.330471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.330531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:17.330582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:17.330616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:17.335651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.335713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:17.335749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:09:17.337615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.337658Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T23:09:17.337699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:17.337749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:17.341431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:17.347213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:17.347427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:09:17.348378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:17.348523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:17.348567Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:17.348900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:17.348948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:17.349109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:17.349245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:17.351422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:17.351467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:17.351698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:17.351744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:09:17.352092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.352140Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:17.352230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:17.352264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:17.352311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:17.352353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:17.352402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:17.352436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:17.352512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:17.352547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:17.352578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:09:17.354551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:17.354662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:17.354707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:09:17.354771Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:09:17.354814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:17.354924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
TICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.547891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.548038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.548236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.548365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.548428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.548480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.555994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:09:17.557962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:17.558336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:17.558414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:17.558544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.558825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:09:17.558870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:09:17.558961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:17.561873Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.561931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:17.561966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:17.562356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:465:2058] recipient: [1:15:2062] 2024-11-21T23:09:17.607318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:09:17.607553Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 281us result status StatusPathDoesNotExist 2024-11-21T23:09:17.607705Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been 
resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:09:17.608673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:466:2058] recipient: [1:100:2135] Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:469:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:400:2377] sender: [1:470:2058] recipient: [1:468:2430] Leader for TabletID 72057594046678944 is [1:471:2431] sender: [1:472:2058] recipient: [1:468:2430] 2024-11-21T23:09:17.661063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:09:17.661170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:17.661225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:09:17.661264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:09:17.661306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:09:17.661334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:09:17.661388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:17.661737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:17.683135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:17.684398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:09:17.684599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:09:17.684748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:09:17.684780Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:17.684847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:09:17.685502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:17.685611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.685675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.686046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.686341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for 
Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.687139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.687267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.687425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.687524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.687762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.688041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.688138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.688573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.688671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.688875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.688990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.689090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.689282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.689369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.689509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.689699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.692706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.692789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.692848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:09:17.710967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:17.711056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:17.711542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:17.711609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:17.711655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:17.713994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for 
TabletID 72057594046678944 is [1:471:2431] sender: [1:531:2058] recipient: [1:15:2062] 2024-11-21T23:09:17.748048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:09:17.748260Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 238us result status StatusPathDoesNotExist 2024-11-21T23:09:17.748416Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:53.379484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:53.379579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:53.379619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:53.379653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2024-11-21T23:08:53.379700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:53.379728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:53.379789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:53.380121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:53.452447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:53.452495Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:53.466594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:53.469407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:53.469608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:53.475931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:53.476604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:53.477156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.477436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:53.481479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.482677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:53.482777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.482998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:53.483063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:53.483104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:53.483209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.489589Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:53.649120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:53.649374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.649619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:53.649870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2024-11-21T23:08:53.649944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.657006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.657178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:53.657394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.657461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:53.657507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:53.657544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:53.663394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.663462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:53.663517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:53.673075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.673142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.673205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.673256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.677363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:53.679320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:53.679528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:53.680580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:53.680703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:53.680756Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.680997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:53.681051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:53.681213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:53.681293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:53.683508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:53.683547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:53.683694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:53.683735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:53.684144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:53.684249Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:53.684333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:53.684364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.684424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:53.684460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:53.684493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:53.684521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:53.684598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:53.684634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:53.684664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:53.686376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:53.686512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:53.686550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:53.686584Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:53.686618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:53.686719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... schemeshard: 72057594046678944 2024-11-21T23:09:19.258247Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:19.258369Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:19.262677Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:19.262900Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:09:19.289730Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:123:2149] sender: [27:234:2058] recipient: [27:15:2062] 2024-11-21T23:09:19.328285Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:19.328705Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:19.329053Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:19.329388Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:19.329481Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:19.337567Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:19.337761Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:19.338102Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:19.338222Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:19.338306Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:19.338369Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:19.347295Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:19.347427Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:19.347505Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 
2024-11-21T23:09:19.349974Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:19.350044Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:19.350137Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:19.350246Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:19.350561Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:19.352620Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:19.352936Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:09:19.354139Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:19.354363Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 115964119145 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:19.354480Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:19.354908Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:19.355016Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:19.355384Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:19.355523Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:19.358244Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:19.358354Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:19.358693Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:19.358777Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:09:19.359337Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T23:09:19.359423Z node 27 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:19.359666Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:19.359748Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:19.359838Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:19.359919Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:19.359983Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:19.360043Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:19.360157Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:19.360224Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:19.360289Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:09:19.361097Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:19.361264Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:19.361333Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:09:19.361410Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:09:19.361490Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:19.361643Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:09:19.365135Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:09:19.365881Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:09:19.367381Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] Bootstrap 2024-11-21T23:09:19.413309Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] Become StateWork (SchemeCache [27:269:2261]) 2024-11-21T23:09:19.416857Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:19.417531Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, 
at schemeshard: 72057594046678944 2024-11-21T23:09:19.417737Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2024-11-21T23:09:19.418482Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2024-11-21T23:09:19.419652Z node 27 :TX_PROXY DEBUG: actor# [27:264:2256] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:09:19.424932Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:19.425226Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T23:09:19.428101Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> THiveTest::TestExternalBootWhenLocked [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |81.3%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut >> KqpQueryPerf::Upsert+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] Test command err: 2024-11-21T23:08:09.205228Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T23:08:09.208965Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T23:08:09.209215Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T23:08:09.210127Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [3:65:2071] ControllerId# 72057594037932033 2024-11-21T23:08:09.210175Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T23:08:09.210329Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T23:08:09.210599Z node 3 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T23:08:09.211286Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:08:09.211365Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:08:09.213372Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:71:2075] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.213543Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:72:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.213685Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:73:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.213813Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:74:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.213959Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:75:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.214138Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:76:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.214294Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:64:2070] Create Queue# [3:77:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.214326Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:08:09.214425Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:65:2071] 2024-11-21T23:08:09.214490Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:65:2071] 2024-11-21T23:08:09.214546Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T23:08:09.214588Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T23:08:09.215015Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T23:08:09.215273Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T23:08:09.217918Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: 
"/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T23:08:09.218134Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:192} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2024-11-21T23:08:09.218800Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2024-11-21T23:08:09.220026Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:244} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2024-11-21T23:08:09.220078Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T23:08:09.220883Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [1:87:2075] ControllerId# 72057594037932033 2024-11-21T23:08:09.220916Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T23:08:09.220984Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T23:08:09.221186Z node 1 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T23:08:09.230000Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:61:2065] 2024-11-21T23:08:09.230072Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:61:2065] 2024-11-21T23:08:09.240577Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:08:09.240649Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:08:09.242248Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:94:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.242424Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:95:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.242568Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:96:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.242698Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:97:2083] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.242876Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:98:2084] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.243003Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:99:2085] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.243116Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:86:2074] Create Queue# [1:100:2086] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.243155Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:08:09.243231Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:87:2075] 2024-11-21T23:08:09.243256Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:87:2075] 2024-11-21T23:08:09.243308Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T23:08:09.243344Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 
2024-11-21T23:08:09.244260Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T23:08:09.244391Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:295} Bootstrap 2024-11-21T23:08:09.247064Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:49} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2024-11-21T23:08:09.247217Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2024-11-21T23:08:09.247985Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:19} EstablishPipe AvailDomainId# 0 PipeClientId# [2:108:2072] ControllerId# 72057594037932033 2024-11-21T23:08:09.248017Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:68} SendRegisterNode 2024-11-21T23:08:09.248074Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:270} StartInvalidGroupProxy GroupId# 4294967295 2024-11-21T23:08:09.248273Z node 2 :BS_NODE DEBUG: {NW27@node_warden_impl.cpp:282} StartRequestReportingThrottler 2024-11-21T23:08:09.248822Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:08:09.248866Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:08:09.250353Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:114:2076] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.250507Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:115:2077] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.250729Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:116:2078] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.250861Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:117:2079] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.250985Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:118:2080] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.251141Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:119:2081] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.251286Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:107:2071] Create Queue# [2:120:2082] targetNodeId# 1 Marker# DSP01 2024-11-21T23:08:09.251311Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:08:09.251373Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:108:2072] 2024-11-21T23:08:09.251399Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:108:2072] 2024-11-21T23:08:09.251437Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2024-11-21T23:08:09.251469Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:19} Bootstrap 2024-11-21T23:08:09.251997Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2024-11-21T23:08:09.252240Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:65:2071] 2024-11-21T23:08:09.252305Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false 
NodeListObtained# false PendingEvents.size# 0 2024-11-21T23:08:09.252344Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T23:08:09.252582Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:09.252700Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:61:2065] 2024-11-21T23:08:09.279162Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:87:2075] 2024-11-21T23:08:09.279244Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T23:08:09.279275Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T23:08:09.281193Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:08:09.281404Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:108:2072] 2024-11-21T23:08:09.281443Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:206} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2024-11-21T23:08:09.281469Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2024-11-21T23:08:09.281618Z node 3 :TAB ... # [72057594037927937:2:8:0:0:174:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 22 } Cost# 81370 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 23 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T23:09:20.851809Z node 42 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [72057594037927937:2:8:0:0:174:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T23:09:20.851936Z node 42 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:8:0:0:174:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:09:20.852263Z node 42 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:174:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T23:09:20.852472Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2024-11-21T23:09:20.852985Z node 42 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [42:94:2091], reason = ReasonStop Marker# TSYS29 2024-11-21T23:09:20.853073Z node 42 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2024-11-21T23:09:20.853547Z node 42 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2024-11-21T23:09:20.853608Z node 42 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2024-11-21T23:09:20.853874Z node 42 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2024-11-21T23:09:20.854854Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [42:433:2346] 2024-11-21T23:09:20.854933Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [42:433:2346] 2024-11-21T23:09:20.855084Z node 42 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [42:95:2091] 
2024-11-21T23:09:20.855167Z node 42 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [42:95:2091] 2024-11-21T23:09:20.855294Z node 42 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [42:94:2091] EventType# 268960257 2024-11-21T23:09:20.855582Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2024-11-21T23:09:20.855691Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T23:09:20.855883Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T23:09:20.855994Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T23:09:20.856329Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2024-11-21T23:09:20.856422Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2024-11-21T23:09:20.856586Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2024-11-21T23:09:20.856714Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T23:09:20.857333Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [42:446:2353] 2024-11-21T23:09:20.857439Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [42:446:2353] 2024-11-21T23:09:20.857599Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:09:20.857724Z node 42 :TABLET_RESOLVER DEBUG: SelectForward node 42 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [42:363:2294] 2024-11-21T23:09:20.857849Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [42:446:2353] 2024-11-21T23:09:20.857931Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [42:446:2353] 2024-11-21T23:09:20.858081Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [42:446:2353] 2024-11-21T23:09:20.858166Z node 42 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [42:446:2353] 2024-11-21T23:09:20.858286Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2024-11-21T23:09:20.858626Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2024-11-21T23:09:20.858910Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2024-11-21T23:09:20.859021Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2024-11-21T23:09:20.859089Z node 42 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72075186224037888 Cookie: 2} 2024-11-21T23:09:20.859217Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T23:09:20.859347Z node 42 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0} 2024-11-21T23:09:20.859522Z node 42 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [42:363:2294] CurrentLeaderTablet: [42:380:2306] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T23:09:20.859790Z node 42 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2024-11-21T23:09:20.860284Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [43:448:2091] 2024-11-21T23:09:20.860373Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [43:448:2091] 2024-11-21T23:09:20.860526Z node 43 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:09:20.860656Z node 43 :TABLET_RESOLVER DEBUG: SelectForward node 43 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [42:314:2258] 2024-11-21T23:09:20.860782Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [43:448:2091] 2024-11-21T23:09:20.860920Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [43:448:2091] 2024-11-21T23:09:20.861034Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 42 [43:448:2091] 2024-11-21T23:09:20.861273Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [43:448:2091] 2024-11-21T23:09:20.861421Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [43:448:2091] 2024-11-21T23:09:20.861805Z node 42 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [43:448:2091] 2024-11-21T23:09:20.862409Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [43:448:2091] 2024-11-21T23:09:20.862531Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [43:448:2091] 2024-11-21T23:09:20.862614Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [43:448:2091] 2024-11-21T23:09:20.862762Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [43:448:2091] 2024-11-21T23:09:20.862865Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [43:448:2091] 2024-11-21T23:09:20.862967Z node 43 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [43:448:2091] 2024-11-21T23:09:20.863399Z node 42 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [43:436:2086] EventType# 268697624 2024-11-21T23:09:20.863681Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2024-11-21T23:09:20.863800Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 
2024-11-21T23:09:20.864056Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2024-11-21T23:09:20.864190Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2024-11-21T23:09:20.875958Z node 42 :BS_PROXY_PUT INFO: [d70ef3c23a1a2346] bootstrap ActorId# [42:451:2356] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:09:20.876190Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:09:20.876321Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] restore Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:09:20.876422Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2024-11-21T23:09:20.876523Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2024-11-21T23:09:20.876767Z node 42 :BS_PROXY DEBUG: Send to queueActorId# [42:53:2078] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:09:20.878624Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 23 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 24 }}}} from# [0:1:0:0:0] Marker# BPP01 2024-11-21T23:09:20.878782Z node 42 :BS_PROXY_PUT DEBUG: [d70ef3c23a1a2346] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2024-11-21T23:09:20.878873Z node 42 :BS_PROXY_PUT INFO: [d70ef3c23a1a2346] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:09:20.879190Z node 42 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2024-11-21T23:09:20.879368Z node 42 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 >> KqpQueryPerf::RangeRead-QueryService [GOOD] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 
2024-11-21T23:08:54.209092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:54.209193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:54.209236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:54.209272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:54.209311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:54.209335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:54.209388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:54.209712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:54.296791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:54.296848Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:54.309980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:54.314366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:54.314570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:54.321219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:54.321754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:54.322478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:54.322842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:54.328266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:54.330141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:54.330228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:54.330474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:54.330548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:54.330616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:54.330721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.337626Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:54.500901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:54.501113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.501292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:54.501474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:54.501517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.503709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:54.503828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:54.504010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.504058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:54.504121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:54.504154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:54.509153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.509228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:54.509268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:54.511322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.511386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.511443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:54.511502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:54.515343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:54.518099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:54.518295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:54.519448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:54.519604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:54.519666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:54.519935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:54.520010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:54.520192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:54.520290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:54.522349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:54.522415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:54.522651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:54.522699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:54.523085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:54.523217Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:54.523318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:54.523352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:54.523415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:54.523457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:54.523491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:54.523523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:54.523598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:54.523637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:54.523670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
2024-11-21T23:08:54.525537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:54.525653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:54.525699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:54.525741Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:54.525782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:54.525891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T23:09:23.228517Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:23.228590Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:23.228963Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:09:23.229220Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:23.229263Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:09:23.229311Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T23:09:23.230151Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:23.230211Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:09:23.232352Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:09:23.232448Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:09:23.232481Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:09:23.232520Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:09:23.232556Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:23.233211Z node 
28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:09:23.233287Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:09:23.233315Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:09:23.233346Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:09:23.233381Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:09:23.233448Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T23:09:23.234316Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1057 } } 2024-11-21T23:09:23.234361Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T23:09:23.234515Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1057 } } 2024-11-21T23:09:23.234616Z node 28 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1057 } } 2024-11-21T23:09:23.235289Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:23.235334Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T23:09:23.235443Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2024-11-21T23:09:23.235487Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:09:23.235562Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 120259086582 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 
2024-11-21T23:09:23.235620Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:23.235656Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:23.235691Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:09:23.235731Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2024-11-21T23:09:23.246687Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:09:23.249073Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:09:23.249186Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:23.249264Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:23.249497Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:23.249532Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:09:23.249618Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:09:23.249646Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:09:23.249687Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T23:09:23.249753Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:342:2317] message: TxId: 101 2024-11-21T23:09:23.249790Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:09:23.249825Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:09:23.249848Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:09:23.249949Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:09:23.255333Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:09:23.255393Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:343:2318] TestWaitNotification: OK eventTxId 101 2024-11-21T23:09:23.255928Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:09:23.256140Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 251us result status StatusSuccess 2024-11-21T23:09:23.256632Z node 28 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6509, MsgBus: 7180 2024-11-21T23:09:17.504503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873593672172649:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:17.508739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002950/r3tmp/tmpDgVCfz/pdisk_1.dat 2024-11-21T23:09:18.082967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:18.083089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:18.084937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:18.149633Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 6509, node 1 2024-11-21T23:09:18.350085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:18.350120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:18.350133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:18.350230Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7180 TClient is connected to server localhost:7180 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:18.987144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.022135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.197336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.424170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.514327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:21.540324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873610852043394:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:21.540427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:21.838473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:09:21.898743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:09:21.976886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.028683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.074078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.138296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.208711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873615147011189:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.208779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.209632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873615147011194:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.213098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:09:22.225994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873615147011196:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:09:22.496433Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873593672172649:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:22.496615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 11711, MsgBus: 7541 2024-11-21T23:09:17.406933Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873594817031446:2061];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:17.416054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002877/r3tmp/tmpTLFnv6/pdisk_1.dat 2024-11-21T23:09:17.849909Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:17.878603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:17.878732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11711, node 1 2024-11-21T23:09:17.880987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:18.164259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:18.164281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:18.164289Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:18.164374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7541 TClient is connected to server localhost:7541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:19.105270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:09:19.129845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.366516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.588735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.705126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:21.668146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873611996902325:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:21.682307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:21.709654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:09:21.745476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:09:21.777351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:09:21.849568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:09:21.891031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:09:21.932101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.060468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873616291870127:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.060519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.060689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873616291870132:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.064428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:09:22.074931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873616291870134:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:09:22.410515Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873594817031446:2061];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:22.410599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |81.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> BackupRestore::RestoreTablePartitioningSettings >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable >> BackupRestoreS3::RestoreTablePartitioningSettings >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> KqpQueryPerf::IndexReplace-QueryService [GOOD] >> 
BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> KqpQueryPerf::IndexUpdateOn-QueryService [GOOD] |81.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 23399, MsgBus: 22343 2024-11-21T23:09:17.679738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873595357557928:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:17.679813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002955/r3tmp/tmpDA0gXt/pdisk_1.dat 2024-11-21T23:09:18.102997Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23399, node 1 2024-11-21T23:09:18.117808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:18.117924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:18.123023Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:09:18.141287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:18.222908Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:18.222927Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:18.222934Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T23:09:18.223033Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22343 TClient is connected to server localhost:22343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:19.179073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.197572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:19.210464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:19.364814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:09:19.539564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:09:19.622644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:22.001450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873612537428813:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.001597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.280019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.314013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.357320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.395794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.425555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.517653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.581229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873616832396607:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.581303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.581547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873616832396612:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.584615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:09:22.594737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873616832396614:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:09:22.680604Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873595357557928:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:22.680672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:23.759628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:09:23.811270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:09:23.864148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> THealthCheckTest::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6099, MsgBus: 65203 2024-11-21T23:09:18.131863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873599380719687:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:18.131954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002934/r3tmp/tmpinGNw2/pdisk_1.dat 2024-11-21T23:09:18.947262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:18.947360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:18.954879Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:18.966999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6099, node 1 2024-11-21T23:09:19.290962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:19.290993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:19.291025Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:19.294306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65203 TClient is connected to server localhost:65203 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:20.160662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:09:20.193049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:20.341811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:09:20.527906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:09:20.603663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:22.426315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873616560590577:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.426471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.656633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.685783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.718801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.757789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.788913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.816614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:09:22.862059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873616560591069:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.862138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.862461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873616560591074:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:22.866817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:09:22.877074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873616560591076:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:09:23.150834Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873599380719687:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:23.150898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:24.075849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:09:24.127857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:09:24.205478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> THealthCheckTest::Issues100Groups100VCardListing >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> THealthCheckTest::Issues100GroupsListing >> THealthCheckTest::StorageLimit95 >> THealthCheckTest::SpecificServerless >> THealthCheckTest::OneIssueListing |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> THealthCheckTest::ShardsLimit999 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:56.589028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T23:08:56.589120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:56.589167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:56.589206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:56.589280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:56.589314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:56.589399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:56.589734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:56.676549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:56.676638Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:56.686202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:56.686761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:56.686973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:56.709463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:56.709907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:56.710622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:56.712107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:56.716247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.717739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:56.717807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.717879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:56.717928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:56.717967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:56.718231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:56.726157Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 
2024-11-21T23:08:56.891160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:56.891509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.891764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:56.892034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:56.892105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.896604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:56.896745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:56.896987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.897093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:56.897151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:56.897192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:56.900389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.900465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:56.900512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:56.907345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.907409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.907473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:56.907523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:56.916754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:56.918692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:56.918913Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:56.919877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:56.920026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:56.920072Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:56.920333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:56.920395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:56.920583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:56.920673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:56.922689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:56.922745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:56.922951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.923001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:56.923383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.923430Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:56.923548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:56.923587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:56.923645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:56.923711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:56.923752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:56.923785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:56.923854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:56.923894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in 
progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:56.923928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.583704Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T23:09:29.588794Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.588888Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.588918Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T23:09:29.592356Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T23:09:29.592405Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:29.592624Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T23:09:29.592725Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2024-11-21T23:09:29.592751Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2024-11-21T23:09:29.592787Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2024-11-21T23:09:29.593029Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.593090Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.593113Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T23:09:29.593139Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T23:09:29.593167Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T23:09:29.593225Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T23:09:29.594489Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.594543Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:29.594758Z node 26 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:09:29.594871Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2024-11-21T23:09:29.594905Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T23:09:29.594948Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T23:09:29.594981Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T23:09:29.595022Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T23:09:29.595057Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T23:09:29.595152Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:09:29.595192Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T23:09:29.595226Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T23:09:29.595259Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:09:29.595287Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T23:09:29.595311Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T23:09:29.595355Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T23:09:29.596070Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.598816Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.598938Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.598978Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.599025Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.603974Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:09:29.615615Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 111669152016 } TabletId: 72075186233409546 State: 4 2024-11-21T23:09:29.615709Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T23:09:29.619932Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:09:29.620498Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2024-11-21T23:09:29.620762Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: 
Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T23:09:29.621044Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2024-11-21T23:09:29.624225Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:09:29.624281Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T23:09:29.624361Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T23:09:29.624404Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:09:29.624444Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:09:29.629305Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T23:09:29.629386Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2024-11-21T23:09:29.630570Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T23:09:29.630829Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T23:09:29.630871Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T23:09:29.632029Z node 26 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T23:09:29.632127Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T23:09:29.632163Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:616:2545] 2024-11-21T23:09:29.637695Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 344 RawX2: 111669152023 } TabletId: 72075186233409547 State: 4 2024-11-21T23:09:29.637785Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T23:09:29.644409Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:09:29.645021Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2024-11-21T23:09:29.645232Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:29.645513Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2024-11-21T23:09:29.648406Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:09:29.648458Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:09:29.648551Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:29.661721Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T23:09:29.661812Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2024-11-21T23:09:29.662030Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T23:09:29.662346Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T23:09:29.662422Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:08:51.454208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:51.454298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.454337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:51.454371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:51.454435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:51.454460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:51.454518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.454840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:51.518976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:51.519017Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2024-11-21T23:08:51.536666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:51.539010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:51.539158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:51.543286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:51.544650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:51.545204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.545455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:51.548735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.549573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:51.549613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.549743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:51.549788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:51.549817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:51.549882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.554769Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:08:51.687763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:51.687979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.688180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:51.688371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:51.688430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.695302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.695448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:51.695690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.695749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:51.695789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:51.695823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:51.698482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.698542Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:51.698586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:51.700066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.700120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.700173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.700222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.703607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:51.705562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:51.705731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:51.706744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.706865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:51.706908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.707140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:51.707188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.707347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:51.707417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:51.709200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:51.709243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:51.709386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.709422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:08:51.709812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.709909Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:51.709999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:51.710031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.710091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:51.710129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.710157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:51.710183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:51.710244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:51.710277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:51.710310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:08:51.717371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:51.717521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:08:51.717568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:08:51.717616Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:08:51.717655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:51.717756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
eshard: 72057594046678944 2024-11-21T23:09:29.623240Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:29.623347Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:29.623430Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:29.623617Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.639215Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:123:2149] sender: [37:236:2058] recipient: [37:15:2062] 2024-11-21T23:09:29.654703Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:29.655064Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.655371Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:29.655685Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:29.655791Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.659006Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:29.659196Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:29.659497Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.659601Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:29.659674Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:29.659751Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:29.661927Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.662016Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:29.662086Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:09:29.663827Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.663916Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.664027Z node 37 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:29.664145Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:29.664410Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:29.666017Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:29.666325Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:09:29.667458Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:29.667815Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 158913792106 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:29.667943Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:29.668345Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:29.668453Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:29.668872Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:29.669014Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:29.671144Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:29.671237Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:29.671572Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:29.671669Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:09:29.672333Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.672458Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:29.672770Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:29.672852Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2024-11-21T23:09:29.672962Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:29.673075Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:29.673179Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:29.673255Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:29.673361Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:29.673445Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:29.673523Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:09:29.674187Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:29.674440Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:09:29.674518Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:09:29.674602Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:09:29.674693Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:29.674864Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:09:29.677969Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:09:29.678754Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:09:29.680228Z node 37 :TX_PROXY DEBUG: actor# [37:266:2258] Bootstrap 2024-11-21T23:09:29.719515Z node 37 :TX_PROXY DEBUG: actor# [37:266:2258] Become StateWork (SchemeCache [37:271:2263]) 2024-11-21T23:09:29.722990Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:29.723665Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:09:29.723890Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" 
ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2024-11-21T23:09:29.724760Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2024-11-21T23:09:29.725886Z node 37 :TX_PROXY DEBUG: actor# [37:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:09:29.735076Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:29.735468Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2024-11-21T23:09:29.736364Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> ObjectStorageListingTest::ListingNoFilter |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> VDiskTest::HugeBlobWrite [GOOD] >> TPersQueueTest::DefaultMeteringMode >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobWrite [GOOD] Test command err: Put id# [10:1:1:0:0:10:1] totalSize# 0 blobValueIndex# 8 Trim Put id# [49:1:1:0:0:10:1] totalSize# 10 blobValueIndex# 4 Put id# [78:1:1:0:0:1048576:1] totalSize# 20 blobValueIndex# 47 Put id# [34:1:1:0:0:1048576:1] totalSize# 1048596 blobValueIndex# 49 Change MinHugeBlobSize# 65536 Put id# [1:1:1:0:0:40960:1] totalSize# 2097172 blobValueIndex# 23 Put id# [89:1:1:0:0:40960:1] totalSize# 2138132 blobValueIndex# 27 Put id# [95:1:1:0:0:589824:1] totalSize# 2179092 blobValueIndex# 39 Put id# [1:1:2:0:0:1572864:1] totalSize# 2768916 blobValueIndex# 50 Put id# [66:1:1:0:0:1572864:1] totalSize# 4341780 blobValueIndex# 57 Put id# [47:1:1:0:0:40960:1] totalSize# 5914644 blobValueIndex# 24 Put id# [92:1:1:0:0:10:1] totalSize# 5955604 blobValueIndex# 9 Put id# 
[85:1:1:0:0:10:1] totalSize# 5955614 blobValueIndex# 8 Put id# [58:1:1:0:0:1024:1] totalSize# 5955624 blobValueIndex# 19 Change MinHugeBlobSize# 524288 Restart Put id# [9:1:1:0:0:1024:1] totalSize# 5956648 blobValueIndex# 19 Change MinHugeBlobSize# 8192 Trim Put id# [23:1:1:0:0:1572864:1] totalSize# 5957672 blobValueIndex# 52 Put id# [36:1:1:0:0:1572864:1] totalSize# 7530536 blobValueIndex# 59 Trim Put id# [14:1:1:0:0:589824:1] totalSize# 9103400 blobValueIndex# 37 Change MinHugeBlobSize# 61440 Put id# [18:1:1:0:0:40960:1] totalSize# 9693224 blobValueIndex# 25 Trim Put id# [61:1:1:0:0:10:1] totalSize# 9734184 blobValueIndex# 0 Trim Put id# [89:1:2:0:0:1572864:1] totalSize# 9734194 blobValueIndex# 51 Put id# [5:1:1:0:0:40960:1] totalSize# 11307058 blobValueIndex# 20 Change MinHugeBlobSize# 65536 Put id# [81:1:1:0:0:1048576:1] totalSize# 11348018 blobValueIndex# 41 Change MinHugeBlobSize# 61440 Put id# [68:1:1:0:0:10:1] totalSize# 12396594 blobValueIndex# 2 Put id# [79:1:1:0:0:40960:1] totalSize# 12396604 blobValueIndex# 29 Trim Put id# [18:1:2:0:0:40960:1] totalSize# 12437564 blobValueIndex# 27 Trim Put id# [9:1:2:0:0:1572864:1] totalSize# 12478524 blobValueIndex# 51 Put id# [90:1:1:0:0:40960:1] totalSize# 14051388 blobValueIndex# 23 Put id# [18:1:3:0:0:1572864:1] totalSize# 14092348 blobValueIndex# 59 Put id# [31:1:1:0:0:1024:1] totalSize# 15665212 blobValueIndex# 15 Put id# [98:1:1:0:0:1024:1] totalSize# 15666236 blobValueIndex# 11 Change MinHugeBlobSize# 524288 Put id# [79:1:2:0:0:1048576:1] totalSize# 15667260 blobValueIndex# 46 Put id# [15:1:1:0:0:10:1] totalSize# 16715836 blobValueIndex# 5 Put id# [37:1:1:0:0:1048576:1] totalSize# 16715846 blobValueIndex# 40 Change MinHugeBlobSize# 65536 Put id# [27:1:1:0:0:1048576:1] totalSize# 17764422 blobValueIndex# 47 Put id# [84:1:1:0:0:1572864:1] totalSize# 18812998 blobValueIndex# 52 Put id# [56:1:1:0:0:1024:1] totalSize# 20385862 blobValueIndex# 15 Restart Put id# [71:1:1:0:0:1048576:1] totalSize# 20386886 blobValueIndex# 44 Put id# [67:1:1:0:0:10:1] totalSize# 21435462 blobValueIndex# 4 Put id# [51:1:1:0:0:1048576:1] totalSize# 21435472 blobValueIndex# 47 Put id# [83:1:1:0:0:40960:1] totalSize# 22484048 blobValueIndex# 21 Put id# [85:1:2:0:0:589824:1] totalSize# 22525008 blobValueIndex# 35 Put id# [79:1:3:0:0:1048576:1] totalSize# 23114832 blobValueIndex# 42 Trim Put id# [7:1:1:0:0:1572864:1] totalSize# 24163408 blobValueIndex# 59 Put id# [59:1:1:0:0:589824:1] totalSize# 25736272 blobValueIndex# 36 Trim Put id# [14:1:2:0:0:1572864:1] totalSize# 26326096 blobValueIndex# 58 Put id# [18:1:4:0:0:10:1] totalSize# 27898960 blobValueIndex# 6 Change MinHugeBlobSize# 12288 Put id# [99:1:1:0:0:10:1] totalSize# 27898970 blobValueIndex# 7 Trim Put id# [61:1:2:0:0:1048576:1] totalSize# 27898980 blobValueIndex# 49 Change MinHugeBlobSize# 65536 Put id# [89:1:3:0:0:1048576:1] totalSize# 28947556 blobValueIndex# 44 Put id# [82:1:1:0:0:1024:1] totalSize# 29996132 blobValueIndex# 11 Put id# [2:1:1:0:0:589824:1] totalSize# 29997156 blobValueIndex# 30 Put id# [62:1:1:0:0:40960:1] totalSize# 30586980 blobValueIndex# 25 Restart Put id# [76:1:1:0:0:10:1] totalSize# 30627940 blobValueIndex# 4 Trim Put id# [83:1:2:0:0:1048576:1] totalSize# 30627950 blobValueIndex# 46 Put id# [23:1:2:0:0:1572864:1] totalSize# 31676526 blobValueIndex# 52 Put id# [84:1:2:0:0:1048576:1] totalSize# 33249390 blobValueIndex# 43 Change MinHugeBlobSize# 8192 Put id# [4:1:1:0:0:1024:1] totalSize# 34297966 blobValueIndex# 16 Change MinHugeBlobSize# 61440 Put id# [21:1:1:0:0:1024:1] totalSize# 
34298990 blobValueIndex# 11 Put id# [81:1:2:0:0:1048576:1] totalSize# 34300014 blobValueIndex# 47 Put id# [11:1:1:0:0:40960:1] totalSize# 35348590 blobValueIndex# 26 Trim Put id# [35:1:1:0:0:40960:1] totalSize# 35389550 blobValueIndex# 23 Put id# [15:1:2:0:0:1572864:1] totalSize# 35430510 blobValueIndex# 52 Put id# [76:1:2:0:0:1024:1] totalSize# 37003374 blobValueIndex# 19 Put id# [96:1:1:0:0:40960:1] totalSize# 37004398 blobValueIndex# 28 Put id# [12:1:1:0:0:40960:1] totalSize# 37045358 blobValueIndex# 23 Put id# [23:1:3:0:0:1048576:1] totalSize# 37086318 blobValueIndex# 47 Put id# [73:1:1:0:0:1572864:1] totalSize# 38134894 blobValueIndex# 55 Put id# [78:1:2:0:0:589824:1] totalSize# 39707758 blobValueIndex# 36 Put id# [40:1:1:0:0:589824:1] totalSize# 40297582 blobValueIndex# 31 Put id# [35:1:2:0:0:1572864:1] totalSize# 40887406 blobValueIndex# 51 Put id# [100:1:1:0:0:1024:1] totalSize# 42460270 blobValueIndex# 11 Put id# [72:1:1:0:0:1572864:1] totalSize# 42461294 blobValueIndex# 54 Put id# [94:1:1:0:0:10:1] totalSize# 44034158 blobValueIndex# 1 Put id# [21:1:2:0:0:10:1] totalSize# 44034168 blobValueIndex# 1 Put id# [61:1:3:0:0:589824:1] totalSize# 44034178 blobValueIndex# 31 Put id# [93:1:1:0:0:10:1] totalSize# 44624002 blobValueIndex# 2 Put id# [26:1:1:0:0:1572864:1] totalSize# 44624012 blobValueIndex# 50 Trim Put id# [44:1:1:0:0:589824:1] totalSize# 46196876 blobValueIndex# 36 Put id# [10:1:2:0:0:1024:1] totalSize# 46786700 blobValueIndex# 15 Change MinHugeBlobSize# 65536 Put id# [76:1:3:0:0:10:1] totalSize# 46787724 blobValueIndex# 7 Restart Put id# [67:1:2:0:0:40960:1] totalSize# 46787734 blobValueIndex# 26 Put id# [67:1:3:0:0:1024:1] totalSize# 46828694 blobValueIndex# 17 Put id# [80:1:1:0:0:10:1] totalSize# 46829718 blobValueIndex# 7 Trim Put id# [13:1:1:0:0:1572864:1] totalSize# 46829728 blobValueIndex# 52 Put id# [62:1:2:0:0:1024:1] totalSize# 48402592 blobValueIndex# 11 Trim Put id# [71:1:2:0:0:10:1] totalSize# 48403616 blobValueIndex# 0 Change MinHugeBlobSize# 524288 Put id# [44:1:2:0:0:1572864:1] totalSize# 48403626 blobValueIndex# 59 Put id# [75:1:1:0:0:1024:1] totalSize# 49976490 blobValueIndex# 11 Change MinHugeBlobSize# 8192 Put id# [57:1:1:0:0:1024:1] totalSize# 49977514 blobValueIndex# 16 Put id# [49:1:2:0:0:1572864:1] totalSize# 49978538 blobValueIndex# 52 Put id# [81:1:3:0:0:10:1] totalSize# 51551402 blobValueIndex# 1 Put id# [76:1:4:0:0:589824:1] totalSize# 51551412 blobValueIndex# 31 Put id# [57:1:2:0:0:40960:1] totalSize# 52141236 blobValueIndex# 20 Put id# [60:1:1:0:0:10:1] totalSize# 52182196 blobValueIndex# 8 Put id# [8:1:1:0:0:589824:1] totalSize# 52182206 blobValueIndex# 31 Put id# [24:1:1:0:0:1024:1] totalSize# 52772030 blobValueIndex# 16 Put id# [92:1:2:0:0:10:1] totalSize# 52773054 blobValueIndex# 2 Change MinHugeBlobSize# 65536 Put id# [55:1:1:0:0:589824:1] totalSize# 52773064 blobValueIndex# 32 Put id# [38:1:1:0:0:1572864:1] totalSize# 53362888 blobValueIndex# 50 Put id# [3:1:1:0:0:589824:1] totalSize# 54935752 blobValueIndex# 33 Put id# [1:1:3:0:0:10:1] totalSize# 55525576 blobValueIndex# 3 Change MinHugeBlobSize# 12288 Put id# [31:1:2:0:0:1024:1] totalSize# 55525586 blobValueIndex# 15 Trim Put id# [52:1:1:0:0:589824:1] totalSize# 55526610 blobValueIndex# 38 Put id# [99:1:2:0:0:589824:1] totalSize# 56116434 blobValueIndex# 38 Put id# [49:1:3:0:0:40960:1] totalSize# 56706258 blobValueIndex# 21 Put id# [28:1:1:0:0:10:1] totalSize# 56747218 blobValueIndex# 3 Trim Put id# [76:1:5:0:0:1572864:1] totalSize# 56747228 blobValueIndex# 51 Trim Put id# 
[96:1:2:0:0:10:1] totalSize# 58320092 blobValueIndex# 4 Trim Put id# [22:1:1:0:0:1572864:1] totalSize# 58320102 blobValueIndex# 58 Trim Put id# [5:1:2:0:0:40960:1] totalSize# 59892966 blobValueIndex# 28 Put id# [67:1:4:0:0:589824:1] totalSize# 59933926 blobValueIndex# 37 Put id# [92:1:3:0:0:1024:1] totalSize# 60523750 blobValueIndex# 15 Put id# [56:1:2:0:0:1048576:1] totalSize# 60524774 blobValueIndex# 48 Put id# [77:1:1:0:0:589824:1] totalSize# 61573350 blobValueIndex# 31 Put id# [86:1:1:0:0:10:1] totalSize# 62163174 blobValueIndex# 9 Trim Put id# [48:1:1:0:0:589824:1] totalSize# 62163184 blobValueIndex# 39 Put id# [6:1:1:0:0:1048576:1] totalSize# 62753008 blobValueIndex# 49 Put id# [55:1:2:0:0:1572864:1] totalSize# 63801584 blobValueIndex# 52 Put id# [99:1:3:0:0:1024:1] totalSize# 65374448 blobValueIndex# 10 Put id# [29:1:1:0:0:10:1] totalSize# 65375472 blobValueIndex# 2 Put id# [31:1:3:0:0:1572864:1] totalSize# 65375482 blobValueIndex# 55 Change MinHugeBlobSize# 8192 Put id# [94:1:2:0:0:1024:1] totalSize# 66948346 blobValueIndex# 16 Trim Put id# [47:1:2:0:0:1048576:1] totalSize# 66949370 blobValueIndex# 43 Put id# [52:1:2:0:0:1048576:1] totalSize# 67997946 blobValueIndex# 41 Put id# [23:1:4:0:0:1024:1] totalSize# 69046522 blobValueIndex# 16 Put id# [55:1:3:0:0:1024:1] totalSize# 69047546 blobValueIndex# 16 Put id# [77:1:2:0:0:40960:1] totalSize# 69048570 blobValueIndex# 26 Put id# [23:1:5:0:0:1024:1] totalSize# 69089530 blobValueIndex# 13 Put id# [94:1:3:0:0:589824:1] totalSize# 69090554 blobValueIndex# 34 Put id# [41:1:1:0:0:589824:1] totalSize# 69680378 blobValueIndex# 30 Change MinHugeBlobSize# 61440 Put id# [81:1:4:0:0:1572864:1] totalSize# 70270202 blobValueIndex# 53 Put id# [80:1:2:0:0:10:1] totalSize# 71843066 blobValueIndex# 3 Change MinHugeBlobSize# 65536 Trim Put id# [14:1:3:0:0:40960:1] totalSize# 71843076 blobValueIndex# 23 Put id# [43:1:1:0:0:1572864:1] totalSize# 71884036 blobValueIndex# 50 Put id# [9:1:3:0:0:1048576:1] totalSize# 73456900 blobValueIndex# 41 Trim Put id# [14:1:4:0:0:40960:1] totalSize# 74505476 blobValueIndex# 29 Trim Restart Put id# [59:1:2:0:0:1572864:1] totalSize# 74546436 blobValueIndex# 53 Put id# [88:1:1:0:0:1572864:1] totalSize# 76119300 blobValueIndex# 55 Put id# [77:1:3:0:0:1572864:1] totalSize# 77692164 blobValueIndex# 58 Change MinHugeBlobSize# 12288 Put id# [10:1:3:0:0:1572864:1] totalSize# 79265028 blobValueIndex# 58 Put id# [68:1:2:0:0:589824:1] totalSize# 80837892 blobValueIndex# 32 Restart Put id# [43:1:2:0:0:40960:1] totalSize# 81427716 blobValueIndex# 27 Put id# [75:1:2:0:0:10:1] totalSize# 81468676 blobValueIndex# 7 Put id# [62:1:3:0:0:40960:1] totalSize# 81468686 blobValueIndex# 29 Put id# [99:1:4:0:0:1572864:1] totalSize# 81509646 blobValueIndex# 55 Put id# [30:1:1:0:0:1048576:1] totalSize# 83082510 blobValueIndex# 45 Put id# [6:1:2:0:0:40960:1] totalSize# 84131086 blobValueIndex# 29 Put id# [15:1:3:0:0:1024:1] totalSize# 84172046 blobValueIndex# 14 Trim Put id# [64:1:1:0:0:1048576:1] totalSize# 84173070 blobValueIndex# 45 Trim Put id# [87:1:1:0:0:40960:1] totalSize# 85221646 blobValueIndex# 29 Put id# [37:1:2:0:0:589824:1] totalSize# 85262606 blobValueIndex# 34 Trim Put id# [79:1:4:0:0:10:1] totalSize# 85852430 blobValueIndex# 5 Put id# [100:1:2:0:0:1572864:1] totalSize# 85852440 blobValueIndex# 57 Put id# [100:1:3:0:0:1048576:1] totalSize# 87425304 blobValueIndex# 42 Put id# [95:1:2:0:0:1572864:1] totalSize# 88473880 blobValueIndex# 55 Put id# [45:1:1:0:0:1572864:1] totalSize# 90046744 blobValueIndex# 53 Restart Put id# 
[83:1:3:0:0:589824:1] totalSize# 91619608 blobValueIndex# 39 Put id# [10:1:4:0:0:1024:1] totalSize# 92209432 blobValueIndex# 17 Put id# [62:1:4:0:0:589824:1] totalSize# 92210456 blobValueIndex# 37 Put id# [6:1:3:0:0:40960:1] totalSize# 92800280 blobValueIndex# 26 Restart Put id# [86:1:2:0:0:40960:1] totalSize# 92841240 blobValueIndex# 24 Put id# [67:1:5:0:0:589824:1] totalSize# 92882200 blobValueIndex# 30 Put id# [94:1:4:0:0:1024:1] totalSize# 93472024 blobValueIndex# 14 Put id# [84:1:3:0:0:1024:1] totalSize# 93473048 blobValueIndex# 13 Change MinHugeBlobSize# 8192 Put id# [82:1:2:0:0:1024:1] totalSi ... 31212166 blobValueIndex# 17 Put id# [65:1:21:0:0:10:1] totalSize# 931213190 blobValueIndex# 3 Put id# [85:1:21:0:0:1572864:1] totalSize# 931213200 blobValueIndex# 54 Restart Put id# [99:1:24:0:0:1024:1] totalSize# 932786064 blobValueIndex# 15 Put id# [58:1:17:0:0:1572864:1] totalSize# 932787088 blobValueIndex# 55 Trim Put id# [14:1:21:0:0:1024:1] totalSize# 934359952 blobValueIndex# 12 Change MinHugeBlobSize# 8192 Restart Put id# [21:1:17:0:0:10:1] totalSize# 934360976 blobValueIndex# 6 Change MinHugeBlobSize# 61440 Put id# [53:1:19:0:0:1572864:1] totalSize# 934360986 blobValueIndex# 51 Trim Put id# [56:1:24:0:0:10:1] totalSize# 935933850 blobValueIndex# 0 Put id# [24:1:22:0:0:1024:1] totalSize# 935933860 blobValueIndex# 19 Change MinHugeBlobSize# 524288 Put id# [69:1:18:0:0:589824:1] totalSize# 935934884 blobValueIndex# 36 Put id# [78:1:20:0:0:1048576:1] totalSize# 936524708 blobValueIndex# 44 Trim Put id# [51:1:17:0:0:10:1] totalSize# 937573284 blobValueIndex# 1 Put id# [25:1:7:0:0:1048576:1] totalSize# 937573294 blobValueIndex# 44 Put id# [37:1:20:0:0:10:1] totalSize# 938621870 blobValueIndex# 1 Put id# [2:1:12:0:0:1048576:1] totalSize# 938621880 blobValueIndex# 48 Put id# [51:1:18:0:0:589824:1] totalSize# 939670456 blobValueIndex# 37 Restart Put id# [97:1:18:0:0:1048576:1] totalSize# 940260280 blobValueIndex# 48 Trim Put id# [47:1:23:0:0:589824:1] totalSize# 941308856 blobValueIndex# 34 Put id# [79:1:25:0:0:1048576:1] totalSize# 941898680 blobValueIndex# 42 Change MinHugeBlobSize# 8192 Trim Put id# [61:1:25:0:0:1048576:1] totalSize# 942947256 blobValueIndex# 42 Put id# [46:1:18:0:0:1048576:1] totalSize# 943995832 blobValueIndex# 40 Put id# [62:1:27:0:0:1572864:1] totalSize# 945044408 blobValueIndex# 57 Put id# [24:1:23:0:0:1048576:1] totalSize# 946617272 blobValueIndex# 47 Put id# [69:1:19:0:0:1024:1] totalSize# 947665848 blobValueIndex# 14 Put id# [30:1:18:0:0:1572864:1] totalSize# 947666872 blobValueIndex# 57 Put id# [59:1:24:0:0:1572864:1] totalSize# 949239736 blobValueIndex# 58 Put id# [15:1:16:0:0:1024:1] totalSize# 950812600 blobValueIndex# 11 Put id# [48:1:17:0:0:1024:1] totalSize# 950813624 blobValueIndex# 13 Put id# [69:1:20:0:0:1024:1] totalSize# 950814648 blobValueIndex# 19 Put id# [25:1:8:0:0:1024:1] totalSize# 950815672 blobValueIndex# 19 Put id# [81:1:17:0:0:1572864:1] totalSize# 950816696 blobValueIndex# 55 Restart Put id# [54:1:24:0:0:1024:1] totalSize# 952389560 blobValueIndex# 10 Put id# [13:1:13:0:0:1572864:1] totalSize# 952390584 blobValueIndex# 57 Trim Put id# [98:1:24:0:0:1048576:1] totalSize# 953963448 blobValueIndex# 42 Put id# [18:1:16:0:0:1048576:1] totalSize# 955012024 blobValueIndex# 44 Put id# [26:1:16:0:0:1024:1] totalSize# 956060600 blobValueIndex# 19 Put id# [60:1:12:0:0:589824:1] totalSize# 956061624 blobValueIndex# 34 Put id# [86:1:27:0:0:10:1] totalSize# 956651448 blobValueIndex# 9 Put id# [19:1:16:0:0:1024:1] totalSize# 956651458 blobValueIndex# 16 
Change MinHugeBlobSize# 524288 Put id# [84:1:14:0:0:1024:1] totalSize# 956652482 blobValueIndex# 15 Change MinHugeBlobSize# 8192 Put id# [40:1:12:0:0:1572864:1] totalSize# 956653506 blobValueIndex# 51 Put id# [55:1:18:0:0:1572864:1] totalSize# 958226370 blobValueIndex# 55 Put id# [55:1:19:0:0:1048576:1] totalSize# 959799234 blobValueIndex# 41 Put id# [60:1:13:0:0:589824:1] totalSize# 960847810 blobValueIndex# 31 Put id# [43:1:22:0:0:1048576:1] totalSize# 961437634 blobValueIndex# 41 Put id# [29:1:21:0:0:10:1] totalSize# 962486210 blobValueIndex# 1 Put id# [98:1:25:0:0:1572864:1] totalSize# 962486220 blobValueIndex# 56 Put id# [11:1:18:0:0:10:1] totalSize# 964059084 blobValueIndex# 6 Change MinHugeBlobSize# 12288 Put id# [8:1:18:0:0:10:1] totalSize# 964059094 blobValueIndex# 3 Put id# [2:1:13:0:0:1572864:1] totalSize# 964059104 blobValueIndex# 56 Change MinHugeBlobSize# 8192 Put id# [25:1:9:0:0:589824:1] totalSize# 965631968 blobValueIndex# 37 Put id# [13:1:14:0:0:589824:1] totalSize# 966221792 blobValueIndex# 33 Trim Put id# [40:1:13:0:0:1024:1] totalSize# 966811616 blobValueIndex# 11 Trim Put id# [44:1:18:0:0:40960:1] totalSize# 966812640 blobValueIndex# 26 Put id# [65:1:22:0:0:1024:1] totalSize# 966853600 blobValueIndex# 17 Trim Put id# [51:1:19:0:0:589824:1] totalSize# 966854624 blobValueIndex# 36 Put id# [21:1:18:0:0:1572864:1] totalSize# 967444448 blobValueIndex# 52 Put id# [25:1:10:0:0:1024:1] totalSize# 969017312 blobValueIndex# 13 Put id# [61:1:26:0:0:40960:1] totalSize# 969018336 blobValueIndex# 27 Trim Put id# [15:1:17:0:0:40960:1] totalSize# 969059296 blobValueIndex# 26 Trim Put id# [81:1:18:0:0:1024:1] totalSize# 969100256 blobValueIndex# 12 Change MinHugeBlobSize# 65536 Restart Put id# [26:1:17:0:0:1024:1] totalSize# 969101280 blobValueIndex# 16 Put id# [45:1:17:0:0:40960:1] totalSize# 969102304 blobValueIndex# 25 Put id# [67:1:23:0:0:1024:1] totalSize# 969143264 blobValueIndex# 14 Put id# [51:1:20:0:0:40960:1] totalSize# 969144288 blobValueIndex# 23 Put id# [53:1:20:0:0:40960:1] totalSize# 969185248 blobValueIndex# 27 Put id# [44:1:19:0:0:10:1] totalSize# 969226208 blobValueIndex# 3 Change MinHugeBlobSize# 8192 Trim Put id# [79:1:26:0:0:40960:1] totalSize# 969226218 blobValueIndex# 22 Put id# [72:1:20:0:0:1024:1] totalSize# 969267178 blobValueIndex# 16 Change MinHugeBlobSize# 65536 Put id# [59:1:25:0:0:40960:1] totalSize# 969268202 blobValueIndex# 25 Put id# [50:1:19:0:0:589824:1] totalSize# 969309162 blobValueIndex# 33 Put id# [60:1:14:0:0:589824:1] totalSize# 969898986 blobValueIndex# 36 Trim Put id# [44:1:20:0:0:589824:1] totalSize# 970488810 blobValueIndex# 37 Put id# [90:1:11:0:0:1024:1] totalSize# 971078634 blobValueIndex# 10 Put id# [69:1:21:0:0:1024:1] totalSize# 971079658 blobValueIndex# 14 Put id# [88:1:14:0:0:1024:1] totalSize# 971080682 blobValueIndex# 16 Put id# [58:1:18:0:0:1572864:1] totalSize# 971081706 blobValueIndex# 53 Put id# [11:1:19:0:0:10:1] totalSize# 972654570 blobValueIndex# 3 Change MinHugeBlobSize# 8192 Put id# [23:1:16:0:0:1572864:1] totalSize# 972654580 blobValueIndex# 56 Put id# [96:1:17:0:0:589824:1] totalSize# 974227444 blobValueIndex# 32 Put id# [58:1:19:0:0:589824:1] totalSize# 974817268 blobValueIndex# 35 Put id# [79:1:27:0:0:1048576:1] totalSize# 975407092 blobValueIndex# 47 Put id# [74:1:16:0:0:40960:1] totalSize# 976455668 blobValueIndex# 29 Change MinHugeBlobSize# 65536 Put id# [57:1:19:0:0:1024:1] totalSize# 976496628 blobValueIndex# 16 Trim Put id# [87:1:21:0:0:1024:1] totalSize# 976497652 blobValueIndex# 17 Trim Put id# 
[28:1:13:0:0:40960:1] totalSize# 976498676 blobValueIndex# 20 Trim Restart Put id# [92:1:21:0:0:589824:1] totalSize# 976539636 blobValueIndex# 31 Trim Put id# [19:1:17:0:0:10:1] totalSize# 977129460 blobValueIndex# 7 Put id# [95:1:18:0:0:10:1] totalSize# 977129470 blobValueIndex# 9 Trim Put id# [30:1:19:0:0:1572864:1] totalSize# 977129480 blobValueIndex# 58 Trim Restart Put id# [72:1:21:0:0:1024:1] totalSize# 978702344 blobValueIndex# 13 Put id# [90:1:12:0:0:1048576:1] totalSize# 978703368 blobValueIndex# 48 Change MinHugeBlobSize# 8192 Put id# [85:1:22:0:0:40960:1] totalSize# 979751944 blobValueIndex# 24 Put id# [45:1:18:0:0:1048576:1] totalSize# 979792904 blobValueIndex# 43 Put id# [88:1:15:0:0:589824:1] totalSize# 980841480 blobValueIndex# 39 Put id# [9:1:20:0:0:10:1] totalSize# 981431304 blobValueIndex# 1 Put id# [20:1:10:0:0:1572864:1] totalSize# 981431314 blobValueIndex# 55 Put id# [72:1:22:0:0:1572864:1] totalSize# 983004178 blobValueIndex# 54 Put id# [43:1:23:0:0:1024:1] totalSize# 984577042 blobValueIndex# 19 Change MinHugeBlobSize# 12288 Put id# [88:1:16:0:0:10:1] totalSize# 984578066 blobValueIndex# 7 Put id# [94:1:23:0:0:1572864:1] totalSize# 984578076 blobValueIndex# 52 Trim Put id# [14:1:22:0:0:40960:1] totalSize# 986150940 blobValueIndex# 27 Put id# [16:1:11:0:0:10:1] totalSize# 986191900 blobValueIndex# 5 Put id# [9:1:21:0:0:1048576:1] totalSize# 986191910 blobValueIndex# 45 Put id# [97:1:19:0:0:589824:1] totalSize# 987240486 blobValueIndex# 36 Put id# [35:1:23:0:0:1572864:1] totalSize# 987830310 blobValueIndex# 51 Put id# [84:1:15:0:0:1024:1] totalSize# 989403174 blobValueIndex# 10 Put id# [10:1:22:0:0:1024:1] totalSize# 989404198 blobValueIndex# 16 Put id# [45:1:19:0:0:1024:1] totalSize# 989405222 blobValueIndex# 14 Put id# [42:1:21:0:0:589824:1] totalSize# 989406246 blobValueIndex# 32 Put id# [83:1:24:0:0:40960:1] totalSize# 989996070 blobValueIndex# 22 Put id# [51:1:21:0:0:1572864:1] totalSize# 990037030 blobValueIndex# 56 Put id# [67:1:24:0:0:1572864:1] totalSize# 991609894 blobValueIndex# 50 Put id# [49:1:21:0:0:40960:1] totalSize# 993182758 blobValueIndex# 25 Put id# [38:1:19:0:0:1048576:1] totalSize# 993223718 blobValueIndex# 48 Put id# [55:1:20:0:0:10:1] totalSize# 994272294 blobValueIndex# 5 Put id# [97:1:20:0:0:1572864:1] totalSize# 994272304 blobValueIndex# 56 Put id# [5:1:30:0:0:10:1] totalSize# 995845168 blobValueIndex# 2 Put id# [28:1:14:0:0:10:1] totalSize# 995845178 blobValueIndex# 4 Put id# [46:1:19:0:0:10:1] totalSize# 995845188 blobValueIndex# 4 Trim Put id# [72:1:23:0:0:1048576:1] totalSize# 995845198 blobValueIndex# 48 Put id# [61:1:27:0:0:10:1] totalSize# 996893774 blobValueIndex# 0 Put id# [90:1:13:0:0:10:1] totalSize# 996893784 blobValueIndex# 8 Put id# [75:1:14:0:0:1024:1] totalSize# 996893794 blobValueIndex# 12 Put id# [25:1:11:0:0:1048576:1] totalSize# 996894818 blobValueIndex# 42 Change MinHugeBlobSize# 61440 Trim Put id# [87:1:22:0:0:589824:1] totalSize# 997943394 blobValueIndex# 35 Change MinHugeBlobSize# 12288 Put id# [93:1:16:0:0:40960:1] totalSize# 998533218 blobValueIndex# 26 Trim Put id# [84:1:16:0:0:10:1] totalSize# 998574178 blobValueIndex# 4 Put id# [44:1:21:0:0:589824:1] totalSize# 998574188 blobValueIndex# 37 Put id# [29:1:22:0:0:10:1] totalSize# 999164012 blobValueIndex# 1 Change MinHugeBlobSize# 65536 Trim Put id# [87:1:23:0:0:589824:1] totalSize# 999164022 blobValueIndex# 37 Change MinHugeBlobSize# 12288 Put id# [50:1:20:0:0:589824:1] totalSize# 999753846 blobValueIndex# 31 Change MinHugeBlobSize# 65536 Put id# 
[58:1:20:0:0:1048576:1] totalSize# 1000343670 blobValueIndex# 48 Put id# [85:1:23:0:0:40960:1] totalSize# 1001392246 blobValueIndex# 22 Put id# [62:1:28:0:0:10:1] totalSize# 1001433206 blobValueIndex# 6 Put id# [62:1:29:0:0:589824:1] totalSize# 1001433216 blobValueIndex# 34 Restart Put id# [96:1:18:0:0:589824:1] totalSize# 1002023040 blobValueIndex# 39 Trim Put id# [12:1:14:0:0:1048576:1] totalSize# 1002612864 blobValueIndex# 45 Put id# [34:1:14:0:0:40960:1] totalSize# 1003661440 blobValueIndex# 24 Put id# [82:1:19:0:0:1024:1] totalSize# 1003702400 blobValueIndex# 19 Put id# [6:1:17:0:0:1572864:1] totalSize# 1003703424 blobValueIndex# 56 Put id# [71:1:11:0:0:10:1] totalSize# 1005276288 blobValueIndex# 2 Change MinHugeBlobSize# 61440 Put id# [43:1:24:0:0:589824:1] totalSize# 1005276298 blobValueIndex# 34 Put id# [47:1:24:0:0:10:1] totalSize# 1005866122 blobValueIndex# 3 Put id# [73:1:15:0:0:1048576:1] totalSize# 1005866132 blobValueIndex# 42 Put id# [35:1:24:0:0:40960:1] totalSize# 1006914708 blobValueIndex# 29 Put id# [42:1:22:0:0:40960:1] totalSize# 1006955668 blobValueIndex# 26 Put id# [10:1:23:0:0:1572864:1] totalSize# 1006996628 blobValueIndex# 51 Put id# [75:1:15:0:0:589824:1] totalSize# 1008569492 blobValueIndex# 30 Put id# [84:1:17:0:0:10:1] totalSize# 1009159316 blobValueIndex# 8 Put id# [9:1:22:0:0:1048576:1] totalSize# 1009159326 blobValueIndex# 48 Trim Put id# [5:1:31:0:0:1572864:1] totalSize# 1010207902 blobValueIndex# 51 Put id# [81:1:19:0:0:40960:1] totalSize# 1011780766 blobValueIndex# 26 Put id# [41:1:21:0:0:589824:1] totalSize# 1011821726 blobValueIndex# 38 Trim Put id# [18:1:17:0:0:1572864:1] totalSize# 1012411550 blobValueIndex# 50 Trim Put id# [82:1:20:0:0:1572864:1] totalSize# 1013984414 blobValueIndex# 55 Put id# [30:1:20:0:0:1048576:1] totalSize# 1015557278 blobValueIndex# 42 Put id# [67:1:25:0:0:589824:1] totalSize# 1016605854 blobValueIndex# 33 Put id# [60:1:15:0:0:1048576:1] totalSize# 1017195678 blobValueIndex# 45 Restart >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> TPersQueueTest::PartitionsMapping [GOOD] >> TPersQueueTest::MessageMetadata ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2024-11-21T23:09:33.589874Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.589903Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.589927Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 
2024-11-21T23:09:33.590329Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:33.591976Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:09:33.592053Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.593088Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.593108Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.593134Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:33.593386Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:33.593652Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:09:33.593700Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.594552Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.594574Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.594596Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:33.597060Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:09:33.597117Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.597150Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.597708Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2024-11-21T23:09:33.598993Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.599012Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.599031Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:33.599399Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:09:33.599435Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.599466Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.599537Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2024-11-21T23:09:33.601574Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:09:33.601604Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:09:33.601639Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:33.601932Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:33.602463Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:33.612715Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:09:33.613374Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:33.613805Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (empty maybe) 2024-11-21T23:09:33.620798Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2024-11-21T23:09:33.621073Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:33.621122Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:09:33.621148Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:09:33.621163Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T23:09:33.621178Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T23:09:33.621193Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T23:09:33.621207Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2024-11-21T23:09:33.621224Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2024-11-21T23:09:33.621327Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2024-11-21T23:09:33.621349Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2024-11-21T23:09:33.621364Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2024-11-21T23:09:33.621381Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2024-11-21T23:09:33.621396Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2024-11-21T23:09:33.621412Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2024-11-21T23:09:33.621429Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2024-11-21T23:09:33.621445Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2024-11-21T23:09:33.621554Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2024-11-21T23:09:33.621573Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2024-11-21T23:09:33.621592Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2024-11-21T23:09:33.621606Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2024-11-21T23:09:33.621621Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2024-11-21T23:09:33.621637Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2024-11-21T23:09:33.621651Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2024-11-21T23:09:33.621663Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2024-11-21T23:09:33.621673Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2024-11-21T23:09:33.621693Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2024-11-21T23:09:33.621707Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2024-11-21T23:09:33.621719Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2024-11-21T23:09:33.621741Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2024-11-21T23:09:33.621762Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2024-11-21T23:09:33.621790Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2024-11-21T23:09:33.621807Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2024-11-21T23:09:33.621835Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2024-11-21T23:09:33.621858Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2024-11-21T23:09:33.621884Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2024-11-21T23:09:33.621900Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2024-11-21T23:09:33.621920Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2024-11-21T23:09:33.621940Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2024-11-21T23:09:33.621952Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2024-11-21T23:09:33.621964Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2024-11-21T23:09:33.621977Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2024-11-21T23:09:33.621990Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2024-11-21T23:09:33.622003Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2024-11-21T23:09:33.622017Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2024-11-21T23:09:33.622028Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2024-11-21T23:09:33.622040Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2024-11-21T23:09:33.622054Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2024-11-21T23:09:33.622067Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2024-11-21T23:09:33.622082Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2024-11-21T23:09:33.622105Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2024-11-21T23:09:33.622173Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T23:09:33.624346Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2024-11-21T23:09:33.624530Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2024-11-21T23:09:33.624566Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2024-11-21T23:09:33.624608Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2024-11-21T23:09:33.624627Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2024-11-21T23:09:33.624647Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2024-11-21T23:09:33.624668Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2024-11-21T23:09:33.624697Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2024-11-21T23:09:33.624716Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2024-11-21T23:09:33.624738Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2024-11-21T23:09:33.624753Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2024-11-21T23:09:33.624778Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2024-11-21T23:09:33.624804Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2024-11-21T23:09:33.624819Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2024-11-21T23:09:33.624835Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2024-11-21T23:09:33.624848Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2024-11-21T23:09:33.624872Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2024-11-21T23:09:33.624912Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2024-11-21T23:09:33.624942Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2024-11-21T23:09:33.624956Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2024-11-21T23:09:33.624967Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2024-11-21T23:09:33.624980Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2024-11-21T23:09:33.624993Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2024-11-21T23:09:33.625016Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2024-11-21T23:09:33.625034Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2024-11-21T23:09:33.625070Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2024-11-21T23:09:33.625089Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2024-11-21T23:09:33.625112Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2024-11-21T23:09:33.625127Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2024-11-21T23:09:33.625142Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2024-11-21T23:09:33.625158Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2024-11-21T23:09:33.625175Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2024-11-21T23:09:33.625202Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2024-11-21T23:09:33.625245Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2024-11-21T23:09:33.625265Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2024-11-21T23:09:33.625281Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2024-11-21T23:09:33.625298Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2024-11-21T23:09:33.625315Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2024-11-21T23:09:33.625332Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2024-11-21T23:09:33.625354Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2024-11-21T23:09:33.625373Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2024-11-21T23:09:33.625391Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2024-11-21T23:09:33.625407Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2024-11-21T23:09:33.625430Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2024-11-21T23:09:33.625448Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2024-11-21T23:09:33.625464Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2024-11-21T23:09:33.625481Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2024-11-21T23:09:33.625504Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2024-11-21T23:09:33.625613Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2024-11-21T23:09:33.625646Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2024-11-21T23:09:33.625668Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2024-11-21T23:09:33.625734Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2024-11-21T23:09:33.625909Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2024-11-21T23:09:33.627838Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.627864Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.627883Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:33.628229Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:09:33.628708Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:33.628946Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.630045Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:33.730254Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.730572Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:09:33.730645Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:33.730698Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:09:33.730783Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:09:33.932243Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T23:09:34.038651Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:09:34.040704Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:09:34.040967Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:09:34.042339Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.042368Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.042403Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.046842Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:34.047486Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.047926Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.049968Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:34.158559Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.163944Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:09:34.164019Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.164066Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:09:34.164159Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T23:09:34.164263Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:09:34.164865Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:09:34.165016Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2024-11-21T23:09:34.165118Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero |81.5%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2024-11-21T23:09:28.028520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873639595359960:2185];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:28.028588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019ac/r3tmp/tmpYwS1P9/pdisk_1.dat 2024-11-21T23:09:28.939625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:28.939711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:28.954984Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:28.972728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21148, node 1 2024-11-21T23:09:29.324266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:29.324295Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:29.324301Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:29.324456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2721 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:29.919510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.925702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:29.925762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.928152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:29.928322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:29.928342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:09:29.930324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:29.930346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T23:09:29.932086Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.935307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:29.935674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230569979, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:29.935739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:29.936061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:29.940648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:29.940862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:29.940915Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:29.941013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:29.941052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:29.941093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:29.943294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:29.943338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:29.943356Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:29.943424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:09:30.061200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/dir, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:30.061435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:30.067365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/dir 2024-11-21T23:09:30.067589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:30.067808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:30.067882Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:30.068882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 
281474976710658 2024-11-21T23:09:30.068919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:30.068936Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:09:30.069165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:30.069185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:30.069197Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T23:09:30.070703Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:09:30.073706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230570119, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:30.073766Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230570119, at schemeshard: 72057594046644480 2024-11-21T23:09:30.073909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2024-11-21T23:09:30.075620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:30.075787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:30.075834Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710658:0 ProgressState 2024-11-21T23:09:30.075886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:09:30.075912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:09:30.075946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2024-11-21T23:09:30.079222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:30.079271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:30.079284Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:09:30.079516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:30.079533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:30.079560Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, 
LocalPathId: 2], version: 3 2024-11-21T23:09:30.079600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 Backup "/Root" to "/home/runner/.ya/build/build_root/dl5p/0019ac/r3tmp/tmptOZ3rL/"Create temporary directory "/Root/~backup_20241121T230930"2024-11-21T23:09:30.100306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/~backup_20241121T230930, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:09:30.100501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:30.103088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/~backup_20241121T230930 2024-11-21T23:09:30 ... Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710663 2024-11-21T23:09:30.457373Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 10 2024-11-21T23:09:30.457578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710663 2024-11-21T23:09:30.457604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710663 2024-11-21T23:09:30.457613Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 3 2024-11-21T23:09:30.463629Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710663, at schemeshard: 72057594046644480 2024-11-21T23:09:30.471807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230570511, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:30.471842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710663:0, step: 1732230570511, at schemeshard: 72057594046644480 2024-11-21T23:09:30.471957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710663:0 progress is 1/1 2024-11-21T23:09:30.472039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710663:0 2024-11-21T23:09:30.472072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710663, publications: 2, subscribers: 1 2024-11-21T23:09:30.479251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:30.479515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:30.480872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046644480, cookie: 281474976710663 2024-11-21T23:09:30.480970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710663 2024-11-21T23:09:30.480984Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 
72057594046644480, LocalPathId: 1], version: 11 2024-11-21T23:09:30.481157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710663 2024-11-21T23:09:30.481173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710663 2024-11-21T23:09:30.481182Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710663, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2024-11-21T23:09:30.481216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710663, subscribers: 1 2024-11-21T23:09:30.491490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/dl5p/0019ac/r3tmp/tmptOZ3rL/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/dl5p/0019ac/r3tmp/tmptOZ3rL/" to "/Root"Process "/home/runner/.ya/build/build_root/dl5p/0019ac/r3tmp/tmptOZ3rL/dir"Restore empty directory "/home/runner/.ya/build/build_root/dl5p/0019ac/r3tmp/tmptOZ3rL/dir" to "/Root/dir"2024-11-21T23:09:30.547507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/dir, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:09:30.547689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710664:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:30.563609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710664, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/dir 2024-11-21T23:09:30.563834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:30.564007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:30.564048Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710664:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:30.565611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710664 2024-11-21T23:09:30.565644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710664 2024-11-21T23:09:30.565657Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 12 2024-11-21T23:09:30.565838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710664 2024-11-21T23:09:30.565853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710664 2024-11-21T23:09:30.565862Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 2 
2024-11-21T23:09:30.628459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710664, at schemeshard: 72057594046644480 2024-11-21T23:09:30.641190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230570686, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:30.641219Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710664:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732230570686, at schemeshard: 72057594046644480 2024-11-21T23:09:30.641336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710664:0 128 -> 240 2024-11-21T23:09:30.664363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:30.664596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:30.664641Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710664:0 ProgressState 2024-11-21T23:09:30.664695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2024-11-21T23:09:30.664723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710664:0 2024-11-21T23:09:30.664774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710664, publications: 2, subscribers: 1 2024-11-21T23:09:30.666637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046644480, cookie: 281474976710664 2024-11-21T23:09:30.666672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710664 2024-11-21T23:09:30.666684Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 13 2024-11-21T23:09:30.666842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710664 2024-11-21T23:09:30.666854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710664 2024-11-21T23:09:30.666862Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710664, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 3 2024-11-21T23:09:30.666888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, subscribers: 1 Restore ACL "/home/runner/.ya/build/build_root/dl5p/0019ac/r3tmp/tmptOZ3rL/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/dl5p/0019ac/r3tmp/tmptOZ3rL/dir/permissions.pb"2024-11-21T23:09:30.722653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/dir, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:09:30.722824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:30.722848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at 
schemeshard: 72057594046644480 2024-11-21T23:09:30.722914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710665:0 progress is 1/1 2024-11-21T23:09:30.722993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710665:0 2024-11-21T23:09:30.723004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710665, publications: 2, subscribers: 0 2024-11-21T23:09:30.727251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/dir, set owner:root@builtin 2024-11-21T23:09:30.727390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:30.727568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:30.728679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710665 2024-11-21T23:09:30.728712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710665 2024-11-21T23:09:30.728724Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 4 2024-11-21T23:09:30.728937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046644480, cookie: 281474976710665 2024-11-21T23:09:30.728982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2024-11-21T23:09:30.728992Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2024-11-21T23:09:30.729028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, subscribers: 0 Restore completed successfully >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] Test command err: 2024-11-21T23:09:27.633737Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873638239518650:2152];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:27.639139Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019bd/r3tmp/tmpni6Ydq/pdisk_1.dat 2024-11-21T23:09:28.403976Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:28.416930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:28.417030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:28.436156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 24311, node 1 2024-11-21T23:09:28.602909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:28.602930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:28.602938Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:28.603034Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:28.919593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:28.925459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:28.925513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:28.931637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:28.931892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:28.931908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:09:28.934161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:28.934186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:09:28.935985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:28.937401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:28.939916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230568985, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:28.939971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:28.940290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:28.942096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:28.942284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:28.942339Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:28.942429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:28.942466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:28.942529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:28.945624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:28.945697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:28.945714Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:28.945800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:09:31.437136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873655419388741:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:31.437257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:31.785592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:31.786136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:09:31.786703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:31.786727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:31.786762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSequence Propose, path: /Root/table/_serial_column_Key, opId: 281474976710658:1, at schemeshard: 72057594046644480 2024-11-21T23:09:31.786947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 2 -> 2 2024-11-21T23:09:31.787157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:31.791639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/table 2024-11-21T23:09:31.791877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:31.792172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:31.792252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 ProgressState, operation type: TxCreateSequence, at tablet72057594046644480 2024-11-21T23:09:31.792531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:09:31.793704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:31.793755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:31.793769Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:09:31.793966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:31.793982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:31.793991Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2024-11-21T23:09:31.794101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:31.794120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72057594046644480, txId: 281474976710658 2024-11-21T23:09:31.794148Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 1 2024-11-21T23:09:31.799814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:09:31.800692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:09:31.800753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 2 -> 3 2024-11-21T23:09:31.801072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:09:31.801106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:09:31.803480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:09:31.882652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:1 3 -> 128 2024-11-21T23:09:31.884927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#281474976710658:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:31.925156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:09:31.925186Z node 1 :FLAT_TX_SCHE ... 281474976710671 2024-11-21T23:09:33.502980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T23:09:33.502999Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2024-11-21T23:09:33.503252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T23:09:33.503277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T23:09:33.503291Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 2 2024-11-21T23:09:33.503412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T23:09:33.503428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T23:09:33.503439Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 1 2024-11-21T23:09:33.503559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710671, at schemeshard: 72057594046644480 2024-11-21T23:09:33.512394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:1 3 -> 128 2024-11-21T23:09:33.512669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710671:0 HandleReply 
TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:09:33.512715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 2 -> 3 2024-11-21T23:09:33.515773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#281474976710671:1 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:33.515965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710671:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:09:33.544450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710671:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:09:33.544475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:09:33.544563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 3 -> 128 2024-11-21T23:09:33.547754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710671:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:09:33.554829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230573598, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:33.554869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710671:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230573598 2024-11-21T23:09:33.554962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 128 -> 129 2024-11-21T23:09:33.555032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateSequence TPropose operationId#281474976710671:1 HandleReply TEvOperationPlan, at schemeshard: 72057594046644480 2024-11-21T23:09:33.555098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:1 128 -> 240 2024-11-21T23:09:33.558385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:33.558848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:33.558919Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710671:1 ProgressState 2024-11-21T23:09:33.558976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710671:1 progress is 1/2 2024-11-21T23:09:33.559113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710671:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:09:33.561294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T23:09:33.561325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T23:09:33.561360Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 15 2024-11-21T23:09:33.561531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T23:09:33.561545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T23:09:33.561553Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 2024-11-21T23:09:33.561636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710671 2024-11-21T23:09:33.561655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710671 2024-11-21T23:09:33.561662Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710671, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2024-11-21T23:09:33.563934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037891 Status: COMPLETE TxId: 281474976710671 Step: 1732230573598 OrderId: 281474976710671 ExecLatency: 0 ProposeLatency: 8 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037891 CpuTimeUsec: 1290 } } 2024-11-21T23:09:33.564543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710671:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:09:33.564568Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:09:33.564582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710671:0 129 -> 240 2024-11-21T23:09:33.566286Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710671:0 ProgressState 2024-11-21T23:09:33.566353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710671:0 progress is 2/2 2024-11-21T23:09:33.566478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710671:0 2024-11-21T23:09:33.566576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710671:1 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/dl5p/0019bd/r3tmp/tmpbcBL9P/table/data_00.csv"2024-11-21T23:09:33.710051Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jd8fsvh49aeezzk7b8wyp7df, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZmOTVkYTUtOTk0ODkxYWYtNmQwZTgyMjUtNTQzYzBjNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/dl5p/0019bd/r3tmp/tmpbcBL9P/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/dl5p/0019bd/r3tmp/tmpbcBL9P/table/permissions.pb"2024-11-21T23:09:33.759499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/table, operationId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:09:33.759767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:33.759792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:09:33.759875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710673:0 progress is 1/1 2024-11-21T23:09:33.759998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710673:0 2024-11-21T23:09:33.760012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710673, publications: 3, subscribers: 0 2024-11-21T23:09:33.766430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710673, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/table, set owner:root@builtin 2024-11-21T23:09:33.766639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:33.767020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:33.769057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710673 2024-11-21T23:09:33.769107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976710673 2024-11-21T23:09:33.769124Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 3 2024-11-21T23:09:33.769426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710673 2024-11-21T23:09:33.769470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710673 2024-11-21T23:09:33.769483Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2024-11-21T23:09:33.769604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976710673 2024-11-21T23:09:33.769620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710673 2024-11-21T23:09:33.769630Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710673, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 16 2024-11-21T23:09:33.769660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & 
remove, at schemeshard: 72057594046644480, txId: 281474976710673, subscribers: 0 Restore completed successfully2024-11-21T23:09:33.922823Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jd8fsvq7chm71bywrpwzg87q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFmOWJiZTQtZDAzOTdlYzItODlmNGRmYzYtZWU2MzI2N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex >> CompressExecutor::TestReorderedExecutor >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |81.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.6%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2024-11-21T23:09:37.752803Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.752831Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.752851Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.758705Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:37.766709Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.784769Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.786847Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:37.789795Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.789819Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.789837Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.790156Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:37.793456Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.793613Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.793891Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:37.794208Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:09:37.795301Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.795334Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.795362Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.810744Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:37.814539Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.814745Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.818627Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:37.819377Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.819739Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:37.819833Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:37.819872Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:09:37.821059Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.821082Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.821101Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.830788Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:37.831363Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.831606Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.834756Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 11 Compressed message data size: 31 2024-11-21T23:09:37.835760Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:37.835955Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:09:37.838443Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:09:37.838662Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:09:37.838772Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:37.838801Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:09:37.838839Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:09:37.838982Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2024-11-21T23:09:37.839087Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:09:37.839106Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:09:37.839124Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:09:37.839247Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2024-11-21T23:09:37.839300Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:09:37.839320Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T23:09:37.839339Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:09:37.839418Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2024-11-21T23:09:37.839464Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:09:37.839485Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T23:09:37.839507Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:09:37.839608Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). 
Partition stream id: 1 GOT RANGE 7 9 2024-11-21T23:09:37.843188Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.843209Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.843261Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.843589Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:37.852720Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.852909Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.853230Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T23:09:37.854164Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:37.854418Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:09:37.875427Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:09:37.875663Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:09:37.876390Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:37.876437Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:09:37.876454Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:09:37.876486Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:09:37.876542Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:09:37.926241Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). 
Partition stream id: 1 GOT RANGE 0 5 Getting new event 2024-11-21T23:09:37.926384Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:09:37.926442Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2024-11-21T23:09:37.926458Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:09:37.926473Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T23:09:37.926507Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:09:37.926658Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2024-11-21T23:09:37.931456Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.931495Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.931514Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.948233Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:37.957842Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.958055Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.958326Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:37.959469Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:37.960298Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:37.963280Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2024-11-21T23:09:37.963408Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:09:37.966542Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2024-11-21T23:09:37.966586Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:09:37.966606Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2024-11-21T23:09:37.966623Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2024-11-21T23:09:37.966664Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2024-11-21T23:09:37.966689Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T23:09:37.966879Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2024-11-21T23:09:37.967053Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2024-11-21T23:09:38.275439Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.275464Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.275491Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.275917Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T23:09:38.275967Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.275997Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.277422Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007662s 2024-11-21T23:09:38.278030Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.278604Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:09:38.278689Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.280595Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.280614Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.280631Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.280979Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:09:38.281017Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.281046Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.281099Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006818s 2024-11-21T23:09:38.281617Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.282013Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:09:38.282094Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.283005Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.283022Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.283037Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.283825Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:09:38.283874Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.283904Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.283986Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.189450s 2024-11-21T23:09:38.284425Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.285039Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:09:38.285121Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.285941Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.285957Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.285971Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.286343Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2024-11-21T23:09:38.286377Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.286425Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.286510Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.223958s 2024-11-21T23:09:38.287152Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.287559Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2024-11-21T23:09:38.287626Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.288501Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.288520Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.288538Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.288859Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.289211Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:38.298944Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.299446Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2024-11-21T23:09:38.299494Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.299512Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.299579Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.220794s 2024-11-21T23:09:38.299941Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:09:38.301366Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.301386Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.301420Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.301701Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.302139Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:38.302286Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.302720Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:38.405482Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.405897Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:09:38.405952Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:38.405995Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2024-11-21T23:09:38.406060Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:09:38.506441Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:09:38.506600Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2024-11-21T23:09:38.507630Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.507668Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.507698Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.508005Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.508433Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:38.508596Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.509044Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:38.611800Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.612002Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2024-11-21T23:09:38.612049Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:38.612085Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2024-11-21T23:09:38.612162Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2024-11-21T23:09:38.612263Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2024-11-21T23:09:38.612534Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2024-11-21T23:09:38.612609Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2024-11-21T23:09:38.612713Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> ObjectStorageListingTest::ListingNoFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2024-11-21T23:09:37.267865Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.267910Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.267950Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.268523Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:37.269158Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.284263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.284625Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:37.285467Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:37.287481Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:37.287655Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T23:09:37.287780Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:37.287864Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:37.287889Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T23:09:37.287923Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:09:37.287939Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:09:37.289237Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.289303Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.289328Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.289787Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:09:37.290355Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.290506Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.290702Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 10 Compressed message data size: 30 2024-11-21T23:09:37.291543Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:37.291731Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:09:37.292018Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:09:37.292201Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:09:37.292284Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:37.292312Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:09:37.292342Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:09:37.292505Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2024-11-21T23:09:37.292593Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:09:37.292612Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:09:37.292628Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:09:37.292748Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2024-11-21T23:09:37.292799Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2024-11-21T23:09:37.292816Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (6-6) 2024-11-21T23:09:37.292832Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:09:37.292895Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2024-11-21T23:09:37.292930Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2024-11-21T23:09:37.292947Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2024-11-21T23:09:37.292968Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:09:37.293046Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2024-11-21T23:09:37.294443Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.294470Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.294487Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:37.295363Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:37.295744Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:37.295884Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:37.296085Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) Message data size: 100 Compressed message data size: 91 2024-11-21T23:09:37.297031Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:09:37.297218Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2024-11-21T23:09:37.297568Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2024-11-21T23:09:37.297774Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2024-11-21T23:09:37.297875Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:37.297916Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:09:37.298021Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2024-11-21T23:09:37.298100Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:09:37.298130Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:09:37.298205Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2024-11-21T23:09:37.298257Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:09:37.298274Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } } 2024-11-21T23:09:37.298325Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2024-11-21T23:09:37.298381Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2024-11-21T23:09:37.298417Z :DEBUG: [db] [sessionid] [cluster] ... estTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:09:38.459716Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). 
Partition stream id: 1 GOT RANGE 0 201 2024-11-21T23:09:38.540377Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:09:38.540419Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:09:38.540514Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.540827Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.541409Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:38.541653Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2024-11-21T23:09:38.541940Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2024-11-21T23:09:38.618163Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2024-11-21T23:09:38.618480Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:38.618523Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:09:38.618542Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:09:38.618558Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2024-11-21T23:09:38.618578Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2024-11-21T23:09:38.618596Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2024-11-21T23:09:38.618614Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2024-11-21T23:09:38.618633Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2024-11-21T23:09:38.618655Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2024-11-21T23:09:38.618683Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2024-11-21T23:09:38.618740Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2024-11-21T23:09:38.618903Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k": "v", "k1": "v1" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2024-11-21T23:09:38.621085Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2024-11-21T23:09:38.631484Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.631520Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.631543Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.632384Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.632794Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:38.632935Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.633754Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T23:09:38.634169Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2024-11-21T23:09:38.635864Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.635884Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.635930Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:38.636240Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:38.637156Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:38.637304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.637852Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:38.638010Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:38.638126Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:38.638172Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:09:38.638373Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageNoQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2024-11-21T23:09:36.593357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:36.596669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:36.596824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00197c/r3tmp/tmpjy0TAp/pdisk_1.dat 2024-11-21T23:09:37.285354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:09:37.342528Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:37.407358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:37.407616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:37.420148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:37.546544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:37.633640Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:09:37.633989Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:37.730065Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:37.730210Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:37.732188Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:09:37.732311Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:09:37.732371Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:09:37.732770Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:37.775206Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:09:37.775453Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:37.775599Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:09:37.775654Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:37.775699Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:09:37.775736Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:37.776768Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:09:37.776882Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:09:37.776968Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T23:09:37.777021Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:37.777061Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:37.777129Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:09:37.777175Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:37.777372Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:37.777690Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:37.777788Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:09:37.779763Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:37.793789Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:37.793948Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:09:38.012969Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:09:38.035865Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:09:38.035967Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:38.036575Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:38.036648Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:09:38.036713Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:09:38.037026Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:09:38.037222Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:09:38.038003Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:38.038084Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:09:38.042340Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:09:38.042896Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:38.048219Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:09:38.048295Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:38.049076Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:09:38.049157Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:09:38.049223Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:38.050930Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:38.050984Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:38.051055Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:09:38.051138Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:09:38.051198Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:09:38.051296Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:38.056026Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:38.062472Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:09:38.062743Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:09:38.062830Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:09:38.073662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:38.073814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:38.073886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:38.079732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:09:38.087050Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:38.318016Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:38.320837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:09:38.720901Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fszwq0njdedryb8bkssta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjIzMTEzZS1kZGUzYTgzNy0xZmIyMTc2My0xMjBjZWVhYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:09:38.727703Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T23:09:38.728151Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:38.740933Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:38.741071Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:38.744961Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T23:09:38.745226Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2024-11-21T23:09:38.745502Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2024-11-21T23:09:38.745743Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] >> ObjectStorageListingTest::FilterListing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2024-11-21T23:06:37.061676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872906843602414:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:37.061837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:06:37.087058Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439872907008710557:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:37.087113Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00240e/r3tmp/tmpwubnLK/pdisk_1.dat 2024-11-21T23:06:38.146869Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:38.192413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:06:38.521514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:38.521629Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:38.524298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:38.524383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:38.527361Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:06:38.527774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:38.529433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:06:38.577338Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26456, node 1 2024-11-21T23:06:38.641807Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:06:38.883009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:38.883035Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:38.883048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:38.883172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:39.688386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:06:39.871887Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:42.059879Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872906843602414:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:42.059969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:06:42.090575Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439872907008710557:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:42.090638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:06:43.763435Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:43.763557Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872932613406864:2303], Start check tables existence, number paths: 2 2024-11-21T23:06:43.769046Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODhkYzhjMTMtMTNkMjZhNDktNzM1ODEwNDktMWI5MjdlNDk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODhkYzhjMTMtMTNkMjZhNDktNzM1ODEwNDktMWI5MjdlNDk= 2024-11-21T23:06:43.770160Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODhkYzhjMTMtMTNkMjZhNDktNzM1ODEwNDktMWI5MjdlNDk=, ActorId: [1:7439872932613406882:2306], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:43.770558Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2024-11-21T23:06:43.770596Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:43.770612Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:43.770679Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872932613406864:2303], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:43.770737Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872932613406864:2303], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:43.770776Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872932613406864:2303], Successfully finished 2024-11-21T23:06:43.796304Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:43.800810Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872932613406892:2536], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:43.804866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:43.810286Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872932613406892:2536], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 
2024-11-21T23:06:43.819026Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872932613406892:2536], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T23:06:43.834906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872932613406892:2536], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:06:43.898604Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872932613406892:2536], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:43.904521Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872932613406892:2536], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:06:43.907545Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjNkYzhmOTUtYTRkMGI3ZDQtNDllYmQ4YzMtZGQwMDNkNDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjNkYzhmOTUtYTRkMGI3ZDQtNDllYmQ4YzMtZGQwMDNkNDU= 2024-11-21T23:06:43.908301Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjNkYzhmOTUtYTRkMGI3ZDQtNDllYmQ4YzMtZGQwMDNkNDU=, ActorId: [1:7439872932613406974:2307], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:43.909010Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:06:43.909024Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:06:43.909079Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872932613406976:2308], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:43.909281Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjNkYzhmOTUtYTRkMGI3ZDQtNDllYmQ4YzMtZGQwMDNkNDU=, ActorId: [1:7439872932613406974:2307], ActorState: ReadyState, TraceId: 01jd8fmnt54vs1j144p3345310, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439872932613406973:2597] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T23:06:43.909329Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439872932613406974:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=YjNkYzhmOTUtYTRkMGI3ZDQtNDllYmQ4YzMtZGQwMDNkNDU= 2024-11-21T23:06:43.909370Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872932613406977:2309], Database: /Root, Start database fetching 2024-11-21T23:06:43.910489Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872932613406977:2309], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T23:06:43.910619Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872932613406976:2308], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:43.910685Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T23:06:43.910719Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T23:06:43.910739Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T23:06:43.911048Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872932613406988:2311], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: 
[OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T23:06:43.911103Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [Wo ... 3855Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:30.233884Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:30.233895Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:30.234036Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:30.249865Z node 11 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:30.694413Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:09:30.701909Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:09:30.727686Z node 11 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:09:34.998519Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7439873645399733404:2195];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:34.998610Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:36.644498Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:09:36.648632Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ= 2024-11-21T23:09:36.667027Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:09:36.667081Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:09:36.667138Z node 11 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:09:36.667172Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7439873675464504930:2304], Start check tables existence, number paths: 2 2024-11-21T23:09:36.667259Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:09:36.669229Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7439873675464504930:2304], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:09:36.669296Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7439873675464504930:2304], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:09:36.669339Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7439873675464504930:2304], Successfully finished 2024-11-21T23:09:36.669432Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:09:36.675505Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439873675464504957:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:09:36.691644Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:09:36.693685Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439873675464504957:2303], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2024-11-21T23:09:36.693883Z node 11 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439873675464504957:2303], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 
2024-11-21T23:09:36.730849Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439873675464504957:2303], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:09:36.794686Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439873675464504957:2303], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:09:36.798250Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439873675464504957:2303], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:09:36.802522Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2024-11-21T23:09:36.802571Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2024-11-21T23:09:36.802682Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439873675464505015:2307], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:09:36.804856Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439873675464505015:2307], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:09:36.804926Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2024-11-21T23:09:36.804955Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T23:09:36.805235Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7439873675464505024:2308], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T23:09:36.823723Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7439873675464505024:2308], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T23:09:36.838644Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T23:09:36.838682Z node 11 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:09:36.838744Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439873675464505036:2310], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T23:09:36.838941Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: ReadyState, TraceId: 01jd8fsyp56rv6zr7wvgs1h67b, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T23:09:36.845504Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439873675464505036:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:36.845639Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:36.857489Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:09:36.862209Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [11:7439873675464505024:2308], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2024-11-21T23:09:36.863514Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: ExecuteState, TraceId: 01jd8fsyp56rv6zr7wvgs1h67b, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [11:7439873675464505037:2305] WorkloadServiceCleanup: 0 2024-11-21T23:09:36.865919Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: CleanupState, TraceId: 01jd8fsyp56rv6zr7wvgs1h67b, EndCleanup, isFinal: 0 2024-11-21T23:09:36.865992Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: CleanupState, TraceId: 01jd8fsyp56rv6zr7wvgs1h67b, Sent query response back to proxy, proxyRequestId: 3, proxyId: [11:7439873645399733266:2060] 2024-11-21T23:09:36.890067Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:09:36.890137Z node 11 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:09:36.890178Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:09:36.890207Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:09:36.890291Z node 11 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=11&id=ZDI3ODc0MzktNWNiY2MzYmEtMTllZGM0MjEtOTBlYjg4NmQ=, ActorId: [11:7439873675464504931:2305], ActorState: unknown state, Session actor destroyed >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestore::RestoreTableSplitBoundaries |81.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |81.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> TTxDataShardUploadRows::TestUploadRows >> TTxDataShardUploadRows::TestUploadRowsLocks-StreamLookup >> ObjectStorageListingTest::FilterListing [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsNoLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2024-11-21T23:09:44.491725Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:44.494977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:44.495128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001972/r3tmp/tmp9fjKyR/pdisk_1.dat 2024-11-21T23:09:44.990323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:09:45.043035Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:45.102526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:45.102699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:45.116752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:45.240161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:45.279347Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:09:45.279630Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:45.322081Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:45.322191Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:45.323745Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:09:45.323838Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:09:45.323903Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:09:45.324219Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:45.357558Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:09:45.357812Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:45.357964Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:09:45.358031Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:45.358076Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:09:45.358116Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:45.359073Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:09:45.359177Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:09:45.359281Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T23:09:45.359337Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:45.359380Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:45.359429Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:09:45.359473Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:45.359670Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:45.360004Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:45.360108Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:09:45.361871Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:45.372536Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:45.372665Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:09:45.577129Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:09:45.582006Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:09:45.582087Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:45.584461Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:45.584538Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:09:45.584615Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:09:45.584894Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:09:45.585052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:09:45.585724Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:45.585789Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:09:45.587865Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:09:45.588295Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:45.590195Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:09:45.590249Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:45.590894Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:09:45.590969Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:09:45.591030Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:45.592069Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:45.592110Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:45.592175Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:09:45.592234Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:09:45.592295Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:09:45.592374Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:45.596123Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:45.598211Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:09:45.598341Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:09:45.598448Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:09:45.607599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:45.607688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:45.607738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:45.611743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:09:45.617049Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:45.834338Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:45.836973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:09:46.196955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8ft7852ehk0wx5mz8qynwv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE2MGU4ZWEtYWQ2OWRhMDUtYjQ0ZDM4MWQtZjZiZDdkYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:09:46.210144Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T23:09:46.215710Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:46.228554Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:46.228734Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:46.232433Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T23:09:46.232667Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2024-11-21T23:09:46.232858Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2024-11-21T23:09:46.233080Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:824:2661], serverId# [1:825:2662], sessionId# [0:0:0] 2024-11-21T23:09:46.235152Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:830:2667], serverId# [1:831:2668], sessionId# [0:0:0] 2024-11-21T23:09:46.235373Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2024-11-21T23:09:46.235541Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2024-11-21T23:09:46.235708Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:830:2667], serverId# [1:831:2668], sessionId# [0:0:0] >> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> THealthCheckTest::RedGroupIssueOnRedSpace [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] Test command err: diskMask# 121 nonWorkingDomain# 0 120750 diskMask# 121 nonWorkingDomain# 1 4320 diskMask# 122 nonWorkingDomain# 0 83184 diskMask# 122 
nonWorkingDomain# 1 62544 diskMask# 123 nonWorkingDomain# 0 56460 diskMask# 123 nonWorkingDomain# 1 48192 diskMask# 124 nonWorkingDomain# 0 83184 diskMask# 124 nonWorkingDomain# 1 8640 diskMask# 125 nonWorkingDomain# 0 56460 diskMask# 125 nonWorkingDomain# 1 4320 diskMask# 126 nonWorkingDomain# 0 35496 diskMask# 126 nonWorkingDomain# 1 62544 diskMask# 127 nonWorkingDomain# 0 20640 diskMask# 127 nonWorkingDomain# 1 48192 diskMask# 128 nonWorkingDomain# 0 781920 diskMask# 128 nonWorkingDomain# 1 1088640 >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |81.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] |81.6%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |81.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |81.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:56.895929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:56.896007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:56.896043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:56.896084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:56.896151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:56.896182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:56.896257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:56.896607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:56.956708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:56.956747Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:56.965053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:56.965308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:56.965440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:56.975330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:56.975554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:56.976173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:56.976769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:56.981464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.982792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:56.982847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.982947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:56.982992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:56.983034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:56.983316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:56.996812Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:57.110446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:57.110696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.110881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:57.111095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:57.111156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.114885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:57.115049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:57.115344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.115420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:57.115490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:57.115531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:57.118530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.118594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:57.118635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:57.122465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.122539Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.122594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:57.122644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:57.126443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:57.128872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:57.129091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:57.130049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:57.130177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:57.130220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:57.130498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:57.130548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:57.130733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:57.130812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:57.132880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:57.132927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:57.133139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:57.133190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:57.133618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.133654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:57.133727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:57.133751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:57.133783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:57.133833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:57.133871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:57.133894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:57.133942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:57.133967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2024-11-21T23:08:57.133989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... ToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } 
TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:49.776748Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:09:49.777028Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 312us result status StatusSuccess 2024-11-21T23:09:49.778012Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:49.795429Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:09:49.795576Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:714:2614] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T23:09:49.795768Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732230589745770 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732230589745770 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732230589745770 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:09:49.803753Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:786:2614] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T23:09:49.803930Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:714:2614] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreIndexTableSplitBoundaries >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] Test command err: 2024-11-21T23:09:27.936258Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873637692539529:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:27.936310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001a07/r3tmp/tmpTw1mcc/pdisk_1.dat 2024-11-21T23:09:28.612430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:28.612548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:28.614288Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:28.619035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4493, node 1 2024-11-21T23:09:28.795987Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:28.796013Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2024-11-21T23:09:28.796026Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:28.796134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:29.081380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.090702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:29.090783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.098263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:29.098538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:29.098551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:09:29.103311Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:29.103342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:09:29.105385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.106108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:29.110337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230569153, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:29.110420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:29.110737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:29.117077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:29.117242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:29.117284Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:29.117350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:29.117395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:29.117452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:29.120195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:29.120872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:29.120919Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:29.121048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:09:32.267115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873654872409535:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:32.278116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:32.303185Z node 1 :TX_PROXY DEBUG: actor# [1:7439873637692539550:2133] Handle TEvProposeTransaction 2024-11-21T23:09:32.303220Z node 1 :TX_PROXY DEBUG: actor# [1:7439873637692539550:2133] TxId# 281474976710658 ProcessProposeTransaction 2024-11-21T23:09:32.303267Z node 1 :TX_PROXY DEBUG: actor# [1:7439873637692539550:2133] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7439873659167376857:2626] 2024-11-21T23:09:32.392124Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2024-11-21T23:09:32.392531Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:09:32.392604Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:09:32.392752Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:09:32.392870Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:09:32.392922Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2024-11-21T23:09:32.393053Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] txid# 281474976710658 HANDLE EvClientConnected 2024-11-21T23:09:32.393420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:32.393931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:09:32.395175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:32.395220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:32.401665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2024-11-21T23:09:32.401858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:32.402053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:32.402113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 
2024-11-21T23:09:32.402543Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2024-11-21T23:09:32.402601Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873659167376857:2626] txid# 281474976710658 SEND to# [1:7439873659167376856:2304] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2024-11-21T23:09:32.404545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:32.404604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:32.404620Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:09:32.404876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:32.404899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:32.404911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:09:32.407716Z node 1 :FLAT_TX_SCHEMESHARD ... : 1732230589586, at schemeshard: 72057594046644480 2024-11-21T23:09:49.541828Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 129 2024-11-21T23:09:49.544477Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:49.544772Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:49.544835Z node 7 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 281474976710761:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:49.545727Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710761 2024-11-21T23:09:49.545773Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710761 2024-11-21T23:09:49.545796Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710761, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 REQUEST: HEAD /test_bucket/table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:22563 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EE73272F-3A30-4E23-ACB4-EDFFF55CF3AB amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20241121/ru-central1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=30050d54cb5b50429a73393d3c5d13093b1c1a28e773986731b5e9a606e2f01b content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: 
e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20241121T230949Z S3_MOCK::HttpServeRead: /test_bucket/table/data_00.csv / 28 REQUEST: GET /test_bucket/table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:22563 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 98CB4D21-2847-4B21-95C2-F672C59DD64E amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20241121/ru-central1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=a796e7ae118c98d2e64f6f78499c2684ff7c3721c3afd774abf9aac16bb85c5c content-type: application/xml range: bytes=0-27 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20241121T230949Z S3_MOCK::HttpServeRead: /test_bucket/table/data_00.csv / 28 2024-11-21T23:09:49.649618Z node 7 :FLAT_TX_SCHEMESHARD INFO: TRestore TProposedWaitParts, opId: 281474976710761:0 HandleReply TEvSchemaChanged at tablet# 72057594046644480 message# Source { RawX1: 7439873730311479034 RawX2: 4503629692143925 } Origin: 72075186224037891 State: 2 TxId: 281474976710761 Step: 0 Generation: 1 OpResult { Success: true Explain: "" BytesProcessed: 56 RowsProcessed: 7 } 2024-11-21T23:09:49.649660Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710761:0, at schemeshard: 72057594046644480 2024-11-21T23:09:49.649710Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 129 -> 240 2024-11-21T23:09:49.649861Z node 7 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TRestore, opId# 281474976710761:0, reason# domain is not a serverless db, domain# /Root, domainPathId# [OwnerId: 72057594046644480, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046644480, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:09:49.652716Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710761:0 ProgressState 2024-11-21T23:09:49.652793Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2024-11-21T23:09:49.652830Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2024-11-21T23:09:49.654174Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2024-11-21T23:09:49.899343Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7439873730311479206:2365] [0] Resolve database: name# /Root 2024-11-21T23:09:49.899800Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7439873730311479206:2365] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:09:49.899830Z node 7 :TX_PROXY DEBUG: [GetImport] 
[7:7439873730311479206:2365] [0] Send request: schemeShardId# 72057594046644480 2024-11-21T23:09:49.900636Z node 7 :TX_PROXY DEBUG: [GetImport] [7:7439873730311479206:2365] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715666 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:22563" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1732230589 } EndTime { seconds: 1732230589 } } 2024-11-21T23:09:49.911850Z node 7 :TX_PROXY DEBUG: actor# [7:7439873704541673542:2135] Handle TEvNavigate describe path /Root/table 2024-11-21T23:09:49.911891Z node 7 :TX_PROXY DEBUG: Actor# [7:7439873730311479215:3459] HANDLE EvNavigateScheme /Root/table 2024-11-21T23:09:49.912051Z node 7 :TX_PROXY DEBUG: Actor# [7:7439873730311479215:3459] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T23:09:49.912157Z node 7 :TX_PROXY DEBUG: Actor# [7:7439873730311479215:3459] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false ReturnSetVal: true } 2024-11-21T23:09:49.913230Z node 7 :TX_PROXY DEBUG: Actor# [7:7439873730311479215:3459] Handle TEvDescribeSchemeResult Forward to# [7:7439873730311479213:2366] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 11 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 CreateStep: 1732230589544 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "Key" Type: "Int32" TypeId: 1 Id: 1 DefaultFromSequence: "_serial_column_Key" NotNull: true IsBuildInProgress: false } Columns { Name: "Value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 1 IsBackup: false Sequences { Name: "_serial_column_Key" PathId { OwnerId: 72057594046644480 LocalId: 12 } Version: 1 SequenceShard: 72075186224037888 MinValue: 1 MaxValue: 2147483647 StartValue: 1 Cache: 1 Increment: 1 Cycle: false SetVal { NextValue: 8 NextUsed: false } DataType: "Int64" } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046644480 2024-11-21T23:09:50.053317Z node 7 :TX_PROXY DEBUG: actor# [7:7439873704541673542:2135] Handle TEvExecuteKqpTransaction 2024-11-21T23:09:50.053353Z node 7 :TX_PROXY DEBUG: actor# [7:7439873704541673542:2135] TxId# 281474976715667 ProcessProposeKqpTransaction 2024-11-21T23:09:50.054635Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd8ftbf2d9y7q9yjkvak0k3j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZmJmOTQ4MzgtZWRiMjhhNGMtNDgyZmFmMTEtYTVkMGU1YzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TTxDataShardUploadRows::TestUploadRowsLocks-StreamLookup [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> YdbSdkSessionsPool::StressTestSync10 >> YdbSdkSessionsPool::StressTestAsync1 >> YdbSdkSessionsPool::StressTestSync1 >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync >> THealthCheckTest::ShardsNoLimit [GOOD] >> TPersQueueTest::MessageMetadata [GOOD] >> TPersQueueTest::LOGBROKER_7820 >> CompressExecutor::TestReorderedExecutor [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2024-11-21T23:09:33.986497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:33.986723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:33.986801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c26/r3tmp/tmpeFQK75/pdisk_1.dat 2024-11-21T23:09:34.372371Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15106, node 1 TClient is connected to server localhost:7792 2024-11-21T23:09:34.968249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:34.968308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:34.968339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:34.969111Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:39.241573Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:451:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:39.241859Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:39.241972Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c26/r3tmp/tmpgXJ6N3/pdisk_1.dat 2024-11-21T23:09:39.543037Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15713, node 3 TClient is connected to server localhost:14245 2024-11-21T23:09:39.984624Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:39.984693Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:39.984735Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:39.985307Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:45.932921Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:45.933190Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:45.933369Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c26/r3tmp/tmpxHIy5X/pdisk_1.dat 2024-11-21T23:09:46.245840Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23227, node 6 TClient is connected to server localhost:29980 2024-11-21T23:09:46.707889Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:46.707955Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:46.708001Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:46.708249Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:54.904976Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:635:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:54.905346Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:54.905625Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:54.906610Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:633:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:54.907092Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:54.907196Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c26/r3tmp/tmpOZkAXQ/pdisk_1.dat 2024-11-21T23:09:55.411544Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19492, node 8 TClient is connected to server localhost:26726 2024-11-21T23:09:56.173481Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:56.173566Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:56.173602Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:56.174093Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations >> YdbSdkSessionsPool::WaitQueue10 >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsLimit800 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:40.306861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:40.306959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2024-11-21T23:08:40.307005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:40.307047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:40.307093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:40.307121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:40.307177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:40.307497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:40.386505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:40.386573Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:40.396904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:40.397341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:40.397531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:40.409773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:40.410187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:40.410826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:40.415155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:40.419338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:40.420619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:40.420685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:40.420754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:40.420811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:40.420855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:40.421095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:40.431890Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:40.566241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { 
Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:40.567130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.567331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:40.567486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:40.567533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.569906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:40.570021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:40.570195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.570246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:40.570300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:40.570330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:40.572182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.572223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:40.572292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:40.573695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.573742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.573798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:40.573841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:40.577463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:40.579550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:40.579753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: 
[1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:40.580725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:40.580868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:40.580921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:40.581171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:40.581225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:40.581432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:40.581521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:40.583527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:40.583584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:40.583754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:40.583793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:40.584165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:40.584212Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:40.584307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:40.584347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:40.584404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:40.584454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:40.584497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:40.584550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:40.584613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:40.584657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:40.584691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
: 72057594046678944 2024-11-21T23:10:00.312571Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:10:00.312735Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:10:00.312926Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:10:00.312966Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2024-11-21T23:10:00.313027Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:201:2204], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2024-11-21T23:10:00.313546Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T23:10:00.313597Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:10:00.314690Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T23:10:00.314828Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T23:10:00.314867Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T23:10:00.314902Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T23:10:00.314940Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 FAKE_COORDINATOR: Erasing txId 1002 2024-11-21T23:10:00.316142Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T23:10:00.316223Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2024-11-21T23:10:00.316254Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2024-11-21T23:10:00.316287Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:10:00.316337Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:10:00.316403Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2024-11-21T23:10:00.318895Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 
Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1128 } } 2024-11-21T23:10:00.318940Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T23:10:00.319062Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1128 } } 2024-11-21T23:10:00.319156Z node 72 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1128 } } 2024-11-21T23:10:00.321318Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T23:10:00.321363Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2024-11-21T23:10:00.321473Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T23:10:00.321515Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:10:00.321635Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 309237647625 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2024-11-21T23:10:00.321700Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:10:00.321734Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T23:10:00.321768Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:10:00.321806Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2024-11-21T23:10:00.323094Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T23:10:00.323184Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2024-11-21T23:10:00.325285Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T23:10:00.325423Z node 72 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T23:10:00.325533Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2024-11-21T23:10:00.325573Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2024-11-21T23:10:00.325692Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2024-11-21T23:10:00.325725Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T23:10:00.325764Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2024-11-21T23:10:00.325804Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2024-11-21T23:10:00.325843Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2024-11-21T23:10:00.325870Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2024-11-21T23:10:00.325990Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2024-11-21T23:10:00.328940Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2024-11-21T23:10:00.328989Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2024-11-21T23:10:00.329320Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2024-11-21T23:10:00.329404Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2024-11-21T23:10:00.329439Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:398:2373] TestWaitNotification: OK eventTxId 1002 2024-11-21T23:10:00.329844Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:10:00.330029Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 227us result status StatusSuccess 2024-11-21T23:10:00.330497Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } 
} IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2024-11-21T23:09:50.905814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:50.909313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:50.909485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00135c/r3tmp/tmpZggQrp/pdisk_1.dat 2024-11-21T23:09:51.773260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:09:51.830816Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:51.942591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:51.942730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:51.969231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:52.113608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:52.172297Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:09:52.173451Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:09:52.176623Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:09:52.176879Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:52.223630Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:09:52.224509Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:52.224619Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:52.227959Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:09:52.228069Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:09:52.228142Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:09:52.229554Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:52.267518Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:09:52.267756Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:52.267883Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:09:52.267938Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:52.267973Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:09:52.268040Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.268532Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:09:52.268587Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:09:52.269172Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:09:52.269293Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:09:52.269425Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:09:52.269463Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:09:52.269507Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:09:52.269565Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.269606Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:52.269645Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:09:52.269682Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:09:52.269721Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:09:52.269769Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:09:52.269815Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:52.269951Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:09:52.269990Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:09:52.270091Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:52.272948Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:09:52.275960Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:52.276170Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:09:52.276260Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:09:52.276349Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:09:52.276409Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:09:52.276450Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:09:52.276793Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:09:52.276861Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:09:52.276903Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:09:52.276948Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:09:52.277021Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:09:52.277074Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:09:52.277126Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:09:52.277166Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:09:52.277197Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:09:52.279813Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:09:52.279883Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:52.290860Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:52.291008Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:09:52.291049Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:09:52.291111Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:09:52.291198Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:09:52.486043Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:09:52.486102Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:09:52.486138Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:09:52.486271Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:09:52.486324Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:09:52.486488Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:09:52.486529Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:09:52.486571Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:09:52.486607Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:09:52.504878Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:09:52.504976Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.505541Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:09:52.505581Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:09:52.505626Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.505665Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:09:52.505707Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:09:52.505774Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... e execution plan for [3000:281474976715667] at 72075186224037890 executing on unit ReadTableScan 2024-11-21T23:09:55.437573Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompleteOperation 2024-11-21T23:09:55.437616Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2024-11-21T23:09:55.437815Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete 2024-11-21T23:09:55.437847Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation 2024-11-21T23:09:55.437875Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T23:09:55.437900Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations 2024-11-21T23:09:55.437928Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed 2024-11-21T23:09:55.437968Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T23:09:55.437989Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished 2024-11-21T23:09:55.438020Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:55.438047Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2024-11-21T23:09:55.438074Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T23:09:55.438103Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T23:09:55.448875Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T23:09:55.448930Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T23:09:55.448968Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2024-11-21T23:09:55.449016Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 
72075186224037890 at tablet 72075186224037890 send result to client [1:1076:2874], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:09:55.449057Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2024-11-21T23:09:59.792376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:59.792565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:59.792742Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00135c/r3tmp/tmpDQkaxE/pdisk_1.dat 2024-11-21T23:10:00.072409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:00.102084Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:00.152347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:00.152492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:00.167697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:00.303902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:00.337590Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:630:2536] 2024-11-21T23:10:00.337879Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:10:00.393312Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:10:00.393434Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:10:00.394882Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:10:00.394964Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:10:00.395044Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:10:00.395343Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:10:00.395396Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:10:00.395483Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:10:00.395571Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:647:2545] 2024-11-21T23:10:00.395609Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:00.395647Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:10:00.395684Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:00.396549Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:10:00.396639Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:10:00.396702Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:628:2534], serverId# [2:638:2540], sessionId# 
[0:0:0] 2024-11-21T23:10:00.396761Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:00.396802Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:00.396848Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:10:00.396889Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:00.397072Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:00.397341Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:10:00.397431Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:10:00.400603Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:10:00.412401Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:00.412515Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:10:00.602783Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:665:2557], serverId# [2:667:2559], sessionId# [0:0:0] 2024-11-21T23:10:00.603538Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:10:00.603595Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:00.604559Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:00.604616Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:10:00.604668Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:10:00.605400Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:10:00.605563Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:10:00.605829Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:00.605888Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:10:00.620295Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:10:00.620706Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:00.622025Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:10:00.622068Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:00.622976Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:10:00.623048Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:10:00.623118Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:00.624245Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:00.624308Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:00.624388Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:10:00.624459Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:10:00.624517Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:10:00.624618Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:00.625175Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:10:00.627056Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:10:00.627120Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:10:00.628116Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:10:00.638752Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:701:2585], serverId# [2:702:2586], sessionId# [0:0:0] 2024-11-21T23:10:00.638894Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> YdbSdkSessionsPool::Get1Session >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> YdbSdkSessionsPool::RunSmallPlan >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: 2024-11-21T23:09:53.224014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:53.226510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:53.226665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001340/r3tmp/tmpEtIlQQ/pdisk_1.dat 2024-11-21T23:09:53.605357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:09:53.653852Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:53.714209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:53.714344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:53.726600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:53.861562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:53.902374Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:09:53.903435Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:09:53.903888Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:09:53.904141Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:53.946465Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:09:53.947228Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:53.947336Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:53.948978Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:09:53.949065Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:09:53.949138Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:09:53.949493Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:53.979694Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:09:53.979898Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:53.980017Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:09:53.980055Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:53.980087Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:09:53.980137Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:53.980603Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:09:53.980656Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:09:53.981207Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:09:53.981309Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:09:53.981423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:09:53.981459Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:09:53.981495Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:09:53.981547Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:53.981586Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:53.981622Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:09:53.981656Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:09:53.981686Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:09:53.981736Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:09:53.981786Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:53.981906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:09:53.981942Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:09:53.982055Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:53.982304Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:09:53.982365Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:53.982496Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:09:53.982555Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:09:53.982604Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:09:53.982642Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:09:53.982673Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:09:53.982934Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:09:53.982969Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:09:53.983007Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:09:53.983036Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:09:53.983087Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:09:53.983126Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:09:53.983162Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:09:53.983192Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:09:53.983222Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:09:53.984527Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:09:53.984580Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:53.996829Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:53.996972Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:09:53.997010Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:09:53.997077Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:09:53.997151Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:09:54.203842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:09:54.203903Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:09:54.203937Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:09:54.204055Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:09:54.204089Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:09:54.204216Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:09:54.204259Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:09:54.204293Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:09:54.204325Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:09:54.218124Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:09:54.218205Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:54.218792Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:09:54.218888Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:09:54.218944Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:54.218984Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:09:54.219020Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:09:54.219075Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 2Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2024-11-21T23:10:03.600283Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889 2024-11-21T23:10:03.601063Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:921:2745], Recipient [2:921:2745]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:03.601122Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:03.601194Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:10:03.601249Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:10:03.601306Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan 2024-11-21T23:10:03.601342Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan 2024-11-21T23:10:03.601392Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0 2024-11-21T23:10:03.601455Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2024-11-21T23:10:03.601494Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan 2024-11-21T23:10:03.601528Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T23:10:03.601560Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:10:03.602066Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2024-11-21T23:10:03.602114Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T23:10:03.602161Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:10:03.602205Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit 
CompletedOperations 2024-11-21T23:10:03.602247Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2024-11-21T23:10:03.602274Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:10:03.602311Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2024-11-21T23:10:03.602354Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:03.602384Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:10:03.602480Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T23:10:03.602540Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T23:10:03.619452Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:10:03.619550Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:10:03.619598Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:10:03.619694Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1109:2906], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:10:03.619770Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:10:03.620225Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1109:2906], Recipient [2:926:2747]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2024-11-21T23:10:03.620282Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T23:10:03.620407Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037890 step# 3500 txid# 281474976715668} 2024-11-21T23:10:03.620455Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500} 2024-11-21T23:10:03.620624Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T23:10:03.620671Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T23:10:03.620877Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:926:2747], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:03.620920Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:03.620976Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T23:10:03.621020Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:10:03.621066Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2024-11-21T23:10:03.621101Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2024-11-21T23:10:03.621140Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890 2024-11-21T23:10:03.621180Z node 2 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T23:10:03.621215Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2024-11-21T23:10:03.621250Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2024-11-21T23:10:03.621284Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2024-11-21T23:10:03.621563Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2024-11-21T23:10:03.621598Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:10:03.621632Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T23:10:03.621664Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2024-11-21T23:10:03.621738Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T23:10:03.622779Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1131:2925], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T23:10:03.622827Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T23:10:03.623152Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2024-11-21T23:10:03.623911Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T23:10:03.625908Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2024-11-21T23:10:03.625968Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2024-11-21T23:10:03.723209Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2024-11-21T23:10:03.723281Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2024-11-21T23:10:03.723543Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:926:2747], Recipient [2:926:2747]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:03.723603Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:03.723673Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2024-11-21T23:10:03.723714Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:10:03.723757Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2024-11-21T23:10:03.723790Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2024-11-21T23:10:03.723826Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2024-11-21T23:10:03.723868Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T23:10:03.723903Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 
72075186224037890 executing on unit ReadTableScan 2024-11-21T23:10:03.723936Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2024-11-21T23:10:03.723966Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2024-11-21T23:10:03.724181Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2024-11-21T23:10:03.724215Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2024-11-21T23:10:03.724246Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T23:10:03.724275Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2024-11-21T23:10:03.724325Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2024-11-21T23:10:03.724350Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T23:10:03.724384Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2024-11-21T23:10:03.724416Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:03.724445Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2024-11-21T23:10:03.724477Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T23:10:03.724507Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T23:10:03.735667Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T23:10:03.735745Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2024-11-21T23:10:03.735787Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2024-11-21T23:10:03.735855Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1109:2906], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:03.735915Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] |81.7%| [TA] $(B)/ydb/core/erasure/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks+StreamLookup [GOOD] Test command err: 2024-11-21T23:09:50.904160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:50.909967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:50.910135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001365/r3tmp/tmp4Ugvyk/pdisk_1.dat 2024-11-21T23:09:51.751539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:09:51.860289Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:51.942308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:51.942544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:51.970037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:52.113731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:52.180991Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:646:2546] 2024-11-21T23:09:52.181276Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:52.250352Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:52.256788Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:52.259039Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:09:52.259156Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:09:52.259257Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:09:52.259735Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:52.302941Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:09:52.303147Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:52.303276Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:680:2565] 2024-11-21T23:09:52.303337Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:52.303397Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:09:52.303458Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.305245Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:652:2548] 2024-11-21T23:09:52.305492Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:52.315298Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:09:52.315437Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T23:09:52.315675Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:637:2540], serverId# [1:678:2563], sessionId# [0:0:0] 2024-11-21T23:09:52.315908Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.315950Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:52.315991Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:09:52.316038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:52.316347Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:52.316637Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:52.316736Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:09:52.317841Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:52.318047Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:52.319433Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T23:09:52.319511Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T23:09:52.319556Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T23:09:52.319856Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:52.319908Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T23:09:52.320027Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:52.320118Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:693:2573] 2024-11-21T23:09:52.320146Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T23:09:52.320171Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T23:09:52.320200Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:09:52.323808Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T23:09:52.323910Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T23:09:52.324525Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:09:52.324567Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:52.324600Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T23:09:52.324634Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:09:52.325493Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:638:2541], serverId# [1:686:2567], sessionId# [0:0:0] 2024-11-21T23:09:52.325907Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T23:09:52.326074Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:52.326179Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T23:09:52.326837Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:656:2550] 2024-11-21T23:09:52.327069Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:52.340482Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:52.340781Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:52.342289Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2024-11-21T23:09:52.342358Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2024-11-21T23:09:52.342431Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2024-11-21T23:09:52.342752Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:52.342805Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2024-11-21T23:09:52.342892Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:52.342965Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:721:2586] 2024-11-21T23:09:52.343003Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2024-11-21T23:09:52.343027Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2024-11-21T23:09:52.343052Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2024-11-21T23:09:52.343474Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:658:2552] 2024-11-21T23:09:52.343678Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:52.353241Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2024-11-21T23:09:52.353352Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2024-11-21T23:09:52.353745Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2024-11-21T23:09:52.353785Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:52.353829Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2024-11-21T23:09:52.353867Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2024-11-21T23:09:52.354364Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [1:640:2543], serverId# [1:694:2574], sessionId# [0:0:0] 2024-11-21T23:09:52.354718Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:52.354808Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T23:09:52.354941Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2024-11-21T23:09:52.355123Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037891 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:52.355195Z node 1 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037891 2024-11-21T23:09:52.355601Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:52.355714Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:52.356972Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2024-11-21T23:09:52.357059Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2024-11-21T23:09:52.357111Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2024-11-21T23:09:52.357394Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:52.357443Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2024-11-21T23:09:52.357509Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:52.357589Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:727:2589] 2024-11-21T23:09:52.357618Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 ... TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:10:03.649809Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:10:03.649918Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:10:03.650031Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:647:2545] 2024-11-21T23:10:03.650073Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:03.650109Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:10:03.650149Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:03.650708Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:10:03.650826Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:10:03.651724Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T23:10:03.651869Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:03.651922Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:03.651972Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:10:03.652031Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:03.652154Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:03.652488Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:10:03.652584Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:10:03.654382Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:10:03.666046Z node 3 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:03.666189Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:10:03.872081Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T23:10:03.872765Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:10:03.872829Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:03.874152Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:03.874210Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:10:03.874263Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:10:03.875027Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:10:03.875203Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:10:03.875554Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:03.875624Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:10:03.876119Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:10:03.876596Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:03.878351Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:10:03.878474Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:03.879254Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:10:03.879334Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:10:03.879405Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:03.880324Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:03.880372Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:03.880430Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:10:03.880510Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:10:03.880568Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:10:03.880669Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2024-11-21T23:10:03.881729Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:10:03.883502Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:10:03.884146Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:10:03.884211Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:10:03.896219Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:03.896347Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:03.896732Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:03.904201Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:10:03.910848Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:10:04.128134Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:10:04.131423Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:10:04.582139Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fts3p4brd3xsysvfqhd6s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTEwZDIyZWQtYWNjOWJiOWUtNDUyN2JjYWItZTFlYjExYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:04.587835Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T23:10:04.588088Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:04.602800Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:04.602964Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:04.764430Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8ftsv1606cvt435sbphmhe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODcxMjdjZmEtYTMzODNkN2ItZGNhYWU0MWQtNGQyZDI0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:04.768251Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint32_value: 300 } } 2024-11-21T23:10:04.776701Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2024-11-21T23:10:04.791065Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2024-11-21T23:10:04.791159Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:04.791279Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2024-11-21T23:10:04.792095Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2024-11-21T23:10:04.792182Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:04.863252Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8ftszs7qv1995fn8h0zqf1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODcxMjdjZmEtYTMzODNkN2ItZGNhYWU0MWQtNGQyZDI0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:04.863804Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:04.875962Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:04.876118Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:04.896986Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ODcxMjdjZmEtYTMzODNkN2ItZGNhYWU0MWQtNGQyZDI0ZjI=, ActorId: [3:821:2658], ActorState: ExecuteState, TraceId: 01jd8ftszs7qv1995fn8h0zqf1, Create QueryResponse for error on request, msg: 2024-11-21T23:10:04.898089Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8ftszs7qv1995fn8h0zqf1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODcxMjdjZmEtYTMzODNkN2ItZGNhYWU0MWQtNGQyZDI0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:10:04.898636Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:04.899206Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:04.899274Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019db/r3tmp/tmpJbHB4H/pdisk_1.dat 2024-11-21T23:09:28.384574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:28.704474Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:28.734279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:28.734378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:28.740546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15580, node 1 2024-11-21T23:09:29.111038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:29.111069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:29.111078Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:29.111184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:09:29.689448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.695434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:29.695503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.699923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:29.700222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:29.700242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:09:29.703109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:29.703142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:09:29.705222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.709175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:29.717788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230569755, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:29.717849Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:29.718202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:29.725002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:29.725287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:29.725401Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:29.725520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:29.725570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:29.725622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:29.728691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:29.728751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:29.728773Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:29.728855Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:09:32.848027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873659615483670:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:32.859447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:32.875466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:32.875936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:09:32.876454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:32.876481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:32.883140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2024-11-21T23:09:32.883352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:32.883550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:32.883623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:09:32.885505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:32.885561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:32.885575Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:09:32.885771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:32.885802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:32.885811Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:09:32.887016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:09:32.911140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:09:32.911227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:09:32.917792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:09:33.051317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:09:33.051343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:09:33.051423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T23:09:33.055115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:09:33.063365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230573101, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:33.063420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230573101 2024-11-21T23:09:33.063535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:09:33.067336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:33.067622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:33.067676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:09:33.069967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:33.069998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:33.070011Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 5 2024-11-21T23:09:33.070244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 7205759404664448 ... 
TAlterTable TPropose operationId#281474976710759:2 HandleReply TEvOperationPlan, operationId: 281474976710759:2, stepId: 1732230603614, at schemeshard: 72057594046644480 2024-11-21T23:10:03.567935Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 128 -> 129 2024-11-21T23:10:03.569999Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:03.570728Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:03.570801Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:1 ProgressState 2024-11-21T23:10:03.570903Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:1 progress is 1/3 2024-11-21T23:10:03.571100Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 ProgressState at tablet: 72057594046644480 2024-11-21T23:10:03.571261Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:10:03.572860Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710759 2024-11-21T23:10:03.572912Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2024-11-21T23:10:03.572926Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 6 2024-11-21T23:10:03.573108Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710759 2024-11-21T23:10:03.573126Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2024-11-21T23:10:03.573133Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2024-11-21T23:10:03.573206Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710759 2024-11-21T23:10:03.573216Z node 13 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710759 2024-11-21T23:10:03.573223Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710759, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 4 2024-11-21T23:10:03.577464Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037892 Status: COMPLETE TxId: 281474976710759 Step: 1732230603614 OrderId: 281474976710759 ExecLatency: 0 ProposeLatency: 9 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037892 CpuTimeUsec: 1145 } } 2024-11-21T23:10:03.578538Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:03.578582Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema 
changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T23:10:03.578608Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T23:10:03.581674Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:0 ProgressState 2024-11-21T23:10:03.581815Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 2/3 2024-11-21T23:10:03.584409Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037893 Status: COMPLETE TxId: 281474976710759 Step: 1732230603614 OrderId: 281474976710759 ExecLatency: 8 ProposeLatency: 14 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037893 CpuTimeUsec: 1495 } } 2024-11-21T23:10:03.585428Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710759:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:03.585478Z node 13 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:2, at schemeshard: 72057594046644480 2024-11-21T23:10:03.585507Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:2 129 -> 240 2024-11-21T23:10:03.588880Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710759:2 ProgressState 2024-11-21T23:10:03.589022Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:2 progress is 3/3 2024-11-21T23:10:03.589097Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T23:10:03.589217Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:1 2024-11-21T23:10:03.589232Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:2 2024-11-21T23:10:03.592113Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T23:10:03.596898Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715667, at schemeshard: 72057594046644480 2024-11-21T23:10:03.598536Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976710760:0, path# /Root/table 2024-11-21T23:10:03.598748Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710760:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:03.602456Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2024-11-21T23:10:03.602572Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2024-11-21T23:10:03.602798Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 ProgressState 2024-11-21T23:10:03.607442Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046644480 2024-11-21T23:10:03.609804Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230603656, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:03.609859Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976710760:0 HandleReply 
TEvOperationPlan: step# 1732230603656 2024-11-21T23:10:03.609897Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2024-11-21T23:10:03.611955Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710760:0 ProgressState 2024-11-21T23:10:03.612099Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2024-11-21T23:10:03.612175Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2024-11-21T23:10:03.614491Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 Restore ACL "/home/runner/.ya/build/build_root/dl5p/0019db/r3tmp/tmpEdJLe2/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/dl5p/0019db/r3tmp/tmpEdJLe2/table/permissions.pb"2024-11-21T23:10:03.776068Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/table, operationId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T23:10:03.776350Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715669:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:03.776387Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 2024-11-21T23:10:03.776473Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715669:0 progress is 1/1 2024-11-21T23:10:03.776619Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715669:0 2024-11-21T23:10:03.776634Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715669, publications: 4, subscribers: 0 2024-11-21T23:10:03.780291Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715669, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/table, set owner:root@builtin 2024-11-21T23:10:03.780498Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:03.781247Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:03.785215Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715669 2024-11-21T23:10:03.785272Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 4, at schemeshard: 72057594046644480, txId: 281474976715669 2024-11-21T23:10:03.785295Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2024-11-21T23:10:03.785545Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715669 2024-11-21T23:10:03.785565Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046644480, txId: 281474976715669 2024-11-21T23:10:03.785577Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2024-11-21T23:10:03.785691Z node 13 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715669 2024-11-21T23:10:03.785734Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715669 2024-11-21T23:10:03.785747Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 9], version: 7 2024-11-21T23:10:03.785879Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046644480, cookie: 281474976715669 2024-11-21T23:10:03.785901Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715669 2024-11-21T23:10:03.785911Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715669, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 14 2024-11-21T23:10:03.785947Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715669, subscribers: 0 Restore completed successfully >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::NoStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2024-11-21T23:09:28.064054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873642680837753:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:28.064099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019f4/r3tmp/tmpggiUc2/pdisk_1.dat 2024-11-21T23:09:28.960974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:28.961076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:28.975497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:28.983079Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1779, node 1 2024-11-21T23:09:29.302171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:29.302193Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:29.302200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:29.302277Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15935 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:29.911800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.934443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:29.934498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.936511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:29.936719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:29.936744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:09:29.949896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:29.949928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:09:29.959554Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:29.962207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:29.965340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230570007, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:29.965403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:29.965659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:29.967896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:29.968081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:29.968130Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:29.968218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:29.968252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:29.968295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:29.973776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:29.973828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:29.973843Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:29.973929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:09:33.260935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873659860707990:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:33.261373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:33.273332Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873642680837753:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:33.277011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:33.293372Z node 1 :TX_PROXY DEBUG: actor# [1:7439873642680837982:2133] Handle TEvProposeTransaction 2024-11-21T23:09:33.293424Z node 1 :TX_PROXY DEBUG: actor# [1:7439873642680837982:2133] TxId# 281474976710658 ProcessProposeTransaction 2024-11-21T23:09:33.293478Z node 1 :TX_PROXY DEBUG: actor# [1:7439873642680837982:2133] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7439873664155675325:2648] 2024-11-21T23:09:33.367101Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } Temporary: false } } } UserToken: "" DatabaseName: "" 2024-11-21T23:09:33.367482Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:09:33.367561Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:09:33.367748Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:09:33.367912Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:09:33.369231Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2024-11-21T23:09:33.369436Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] txid# 281474976710658 HANDLE EvClientConnected 2024-11-21T23:09:33.369907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:33.370659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:09:33.371322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:33.371344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:33.379518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE 
TABLE, path: /Root/table 2024-11-21T23:09:33.379799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:33.380065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:33.380147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:09:33.380525Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2024-11-21T23:09:33.380613Z node 1 :TX_PROXY DEBUG: Actor# [1:7439873664155675325:2648] txid# 281474976710658 SEND to# [1:7439873664155675322:2305] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2024-11-21T23:09:33.382634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:33.382678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:33.382709Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:09:33.382993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Gener ... 281474976715765:2 ProgressState 2024-11-21T23:10:04.011397Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715765:2 progress is 3/3 2024-11-21T23:10:04.011479Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:0 2024-11-21T23:10:04.011659Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:1 2024-11-21T23:10:04.011702Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:2 2024-11-21T23:10:04.013254Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715765 2024-11-21T23:10:04.016487Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715762, at schemeshard: 72057594046644480 2024-11-21T23:10:04.022613Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976715766:0, path# /Root/table 2024-11-21T23:10:04.022780Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715766:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:04.026738Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715766, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2024-11-21T23:10:04.026875Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976715766, status# StatusAccepted 2024-11-21T23:10:04.027172Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976715766:0 ProgressState 2024-11-21T23:10:04.030435Z node 10 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715766, at schemeshard: 72057594046644480 2024-11-21T23:10:04.032863Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230604076, 
transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:04.032907Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976715766:0 HandleReply TEvOperationPlan: step# 1732230604076 2024-11-21T23:10:04.032924Z node 10 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715766:0 128 -> 240 2024-11-21T23:10:04.034850Z node 10 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715766:0 ProgressState 2024-11-21T23:10:04.034966Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715766:0 progress is 1/1 2024-11-21T23:10:04.035018Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715766:0 2024-11-21T23:10:04.037495Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715766 2024-11-21T23:10:04.040598Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715762 2024-11-21T23:10:04.099025Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7439873796053040207:2355] [0] Resolve database: name# /Root 2024-11-21T23:10:04.099572Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7439873796053040207:2355] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:10:04.099598Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7439873796053040207:2355] [0] Send request: schemeShardId# 72057594046644480 2024-11-21T23:10:04.100497Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7439873796053040207:2355] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:28228" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1732230603 } EndTime { seconds: 1732230604 } } 2024-11-21T23:10:04.123968Z node 10 :TX_PROXY DEBUG: actor# [10:7439873765988267056:2135] Handle TEvNavigate describe path /Root/table 2024-11-21T23:10:04.124039Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873796053040215:3675] HANDLE EvNavigateScheme /Root/table 2024-11-21T23:10:04.124432Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873796053040215:3675] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T23:10:04.124555Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873796053040215:3675] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2024-11-21T23:10:04.126153Z node 10 :TX_PROXY DEBUG: Actor# [10:7439873796053040215:3675] Handle TEvDescribeSchemeResult Forward to# [10:7439873796053040213:2356] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 
1732230603649 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } } TableSchemaVersion: 3 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 7 PathOwnerId: 72057594046644480 >> VDiskBalancing::TestStopOneNode_Block42 >> YdbSdkSessionsPool::PeriodicTask10 >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: 
[1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:51.678145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:51.678222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.678283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:51.678321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:51.678362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:51.678597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:51.678681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.678992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:51.757682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:51.757772Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:51.773369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:51.773814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:51.773969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:51.785139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:51.785484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:51.786030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.786608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:51.789438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.790629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:51.790680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.790749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2024-11-21T23:08:51.790799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:51.790844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:51.791092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:51.797235Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:51.918111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:51.918283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.918547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:51.918763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:51.918825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.921297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.921440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:51.921711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.921776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:51.921826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:51.921855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:51.923862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.923930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:51.923972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:51.926381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T23:08:51.926453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.926519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.926564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.930246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:51.935371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:51.935617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:51.936660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.936797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:51.936840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.937092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:51.937150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.937338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:51.937421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:51.939675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:51.939726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:51.939887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.939923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:51.940266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.940308Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:51.940391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:51.940422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.940482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:51.940538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.940593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:51.940619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:51.940675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:51.940712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:51.940742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... : TxMoveTable, at tablet72057594046678944 2024-11-21T23:10:06.407824Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T23:10:06.407855Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T23:10:06.407931Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet72057594046678944 2024-11-21T23:10:06.408223Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:10:06.408393Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2024-11-21T23:10:06.408507Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:10:06.408561Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:10:06.410954Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:10:06.411075Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:10:06.413265Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:06.413421Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:06.413653Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:10:06.413712Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:10:06.413880Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:10:06.414001Z node 62 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:10:06.414037Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2024-11-21T23:10:06.414079Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:201:2204], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2024-11-21T23:10:06.414321Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:06.414363Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:10:06.414456Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:06.414487Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:10:06.414520Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T23:10:06.415806Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:10:06.415889Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:10:06.415922Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T23:10:06.415952Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T23:10:06.415985Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T23:10:06.417683Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:10:06.417775Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:10:06.417802Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T23:10:06.417837Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T23:10:06.417872Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:10:06.417941Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2024-11-21T23:10:06.420111Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:06.420165Z node 62 :FLAT_TX_SCHEMESHARD INFO: 
TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:10:06.420207Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:10:06.420314Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T23:10:06.420349Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T23:10:06.420387Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T23:10:06.420427Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T23:10:06.420461Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T23:10:06.420490Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T23:10:06.420590Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:10:06.420623Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:10:06.421149Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:10:06.421192Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:10:06.421247Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:10:06.421723Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:10:06.424835Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:10:06.425542Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T23:10:06.425789Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T23:10:06.425826Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T23:10:06.426150Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T23:10:06.426231Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T23:10:06.426261Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:463:2438] TestWaitNotification: OK eventTxId 1003 2024-11-21T23:10:06.426750Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: 
false }, at schemeshard: 72057594046678944 2024-11-21T23:10:06.426983Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 281us result status StatusSuccess 2024-11-21T23:10:06.427384Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] Test command err: 2024-11-21T23:09:29.540063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873646350257460:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:29.540214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c88/r3tmp/tmpU3zzct/pdisk_1.dat 2024-11-21T23:09:30.101318Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:30.132912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:30.133040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:30.157928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected TServer::EnableGrpc on GrpcPort 2472, node 1 2024-11-21T23:09:30.214965Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:30.214985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:30.214994Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:30.215085Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:30.514768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:30.558195Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:34.331366Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873667077713142:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c88/r3tmp/tmpUeCgVQ/pdisk_1.dat 2024-11-21T23:09:34.395096Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:34.470938Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:34.512593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:34.512691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:34.519426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5106, node 2 2024-11-21T23:09:34.679617Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:34.679640Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:34.679647Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:34.679737Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12224 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:34.923878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:09:46.069300Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:09:46.088842Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:46.089285Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:46.089869Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:46.091681Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:46.091963Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c88/r3tmp/tmpAiI30e/pdisk_1.dat 2024-11-21T23:09:46.543889Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61101, node 3 TClient is connected to server localhost:28666 2024-11-21T23:09:47.058091Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:47.058158Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:47.058203Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:47.058705Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:57.454257Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:57.454900Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:57.455190Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:57.456017Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:57.456355Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:57.456660Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c88/r3tmp/tmpymyGTe/pdisk_1.dat 2024-11-21T23:09:57.857370Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1275, node 5 TClient is connected to server localhost:15773 2024-11-21T23:09:58.463788Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:58.463871Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:58.463913Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:58.464358Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T23:10:04.418777Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:453:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:04.419376Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:04.419468Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c88/r3tmp/tmpfUrMSA/pdisk_1.dat 2024-11-21T23:10:04.806292Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7863, node 7 TClient is connected to server localhost:18867 2024-11-21T23:10:05.493483Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:05.493562Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:05.493609Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:05.494147Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] Test command err: 2024-11-21T23:09:50.912321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:50.915914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:50.916098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001355/r3tmp/tmpbbxe5G/pdisk_1.dat 2024-11-21T23:09:51.753618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:09:51.831168Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:51.938707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:51.938917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:51.969902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:52.117598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:52.175736Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:09:52.176018Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:52.230800Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:52.230981Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:52.232950Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:09:52.233047Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:09:52.233110Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:09:52.233510Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:52.290065Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:09:52.290279Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:52.290462Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:09:52.290508Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:52.290545Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:09:52.290583Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.291558Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:09:52.291671Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:09:52.291769Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# 
[0:0:0] 2024-11-21T23:09:52.291854Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.291900Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:52.291953Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:09:52.292016Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:52.292213Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:52.292508Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:52.292619Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:09:52.294474Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:52.305897Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:52.306019Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:09:52.506739Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:09:52.530330Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:09:52.530455Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.530939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.530990Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:09:52.531045Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:09:52.531350Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:09:52.531540Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:09:52.532283Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.532353Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:09:52.542840Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:09:52.543384Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:52.566468Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:09:52.566557Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.567374Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck 
TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:09:52.567437Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:09:52.567538Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:52.568903Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:52.568951Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:52.569018Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:09:52.569092Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:09:52.569144Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:09:52.569230Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.581897Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:52.586784Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:09:52.597632Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:09:52.597778Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:09:52.653989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:52.654113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:52.654202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:52.664114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:09:52.672287Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:52.893233Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:52.896924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:09:53.908891Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fte4aarfknyb5k37ajvvs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk5Zjc1OTUtZTZkMmNmMDQtNDRlOGRkZWUtYmYzYzYzMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:09:53.922740Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T23:09:53.923050Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:53.937430Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:53.937610Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:54.278564Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8ftfdyat5tdf91m5mfabd5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZiZDIwY2UtZmE3NmQ0ZjgtNjczNGFkZDktYTEzZDM5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:09:54.313102Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired l ... ode 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2024-11-21T23:10:06.675691Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2024-11-21T23:10:06.675759Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:10:06.677908Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 904 RawX2: 12884904620 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\210\003\000\000\000\000\000\000\021\254\n\000\000\003\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2024-11-21T23:10:06.677983Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:10:06.678102Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:06.678319Z node 3 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2024-11-21T23:10:06.681546Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T23:10:06.681661Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:10:06.681724Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T23:10:06.681775Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:10:06.681812Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:10:06.681857Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} 
ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T23:10:06.681930Z node 3 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T23:10:06.681972Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:10:06.681996Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:10:06.682019Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot 2024-11-21T23:10:06.682041Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot 2024-11-21T23:10:06.682071Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:10:06.682093Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot 2024-11-21T23:10:06.682114Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance 2024-11-21T23:10:06.682136Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T23:10:06.682187Z node 3 :TX_DATASHARD TRACE: Requested stream clearance from [3:904:2732] for [0:281474976715665] at 72075186224037888 2024-11-21T23:10:06.682228Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2024-11-21T23:10:06.682537Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665 2024-11-21T23:10:06.682587Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2024-11-21T23:10:06.682770Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [3:904:2732], Recipient [3:631:2536]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true 2024-11-21T23:10:06.682807Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2024-11-21T23:10:06.682910Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:631:2536], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:06.682946Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:06.683019Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:06.683077Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:10:06.683132Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2024-11-21T23:10:06.683174Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2024-11-21T23:10:06.683232Z node 3 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888 2024-11-21T23:10:06.683281Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:10:06.683330Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 
72075186224037888 executing on unit WaitForStreamClearance 2024-11-21T23:10:06.683376Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan 2024-11-21T23:10:06.683434Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2024-11-21T23:10:06.683765Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2024-11-21T23:10:06.683801Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:10:06.683846Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:10:06.683897Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:10:06.683944Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:10:06.684674Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [3:911:2737], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2024-11-21T23:10:06.684719Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2024-11-21T23:10:06.684956Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2024-11-21T23:10:06.685568Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T23:10:06.685755Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2024-11-21T23:10:06.685814Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2024-11-21T23:10:06.687760Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:06.687838Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:06.688591Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T23:10:06.688646Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2024-11-21T23:10:06.688885Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:897:2725], Recipient [3:631:2536]: NKikimr::TEvTabletPipe::TEvClientDestroyed 2024-11-21T23:10:06.688936Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2024-11-21T23:10:06.689020Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:631:2536], Recipient [3:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:06.689050Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:06.689100Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:06.689138Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:10:06.689178Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2024-11-21T23:10:06.689209Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2024-11-21T23:10:06.689249Z node 3 :TX_DATASHARD TRACE: ReadTable scan complete 
for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0 2024-11-21T23:10:06.689298Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:10:06.689331Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan 2024-11-21T23:10:06.689360Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:10:06.689393Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:06.689441Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T23:10:06.689511Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2024-11-21T23:10:06.689542Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:10:06.689584Z node 3 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:10:06.689624Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:10:06.689678Z node 3 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:10:06.689701Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:10:06.689733Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T23:10:06.689774Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:06.689803Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:10:06.689830Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:10:06.689858Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:10:06.689918Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:06.689966Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:06.690017Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob >> YdbSdkSessionsPool::WaitQueue10 [GOOD] >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2024-11-21T23:09:50.906027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:50.911200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:50.911381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001376/r3tmp/tmpliDyK0/pdisk_1.dat 2024-11-21T23:09:51.753947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:09:51.855403Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:51.944235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:51.944454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:51.970951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:52.117114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:52.191805Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T23:09:52.192160Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:52.246788Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:52.247116Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:52.248966Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:09:52.249060Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:09:52.249122Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:09:52.249500Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:52.285237Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:09:52.285502Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:52.285657Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T23:09:52.285708Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:09:52.285760Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:09:52.285815Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.287676Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T23:09:52.287965Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:52.297815Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:09:52.297943Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T23:09:52.298549Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T23:09:52.298834Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.298879Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:52.298924Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:09:52.298968Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:09:52.299206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:09:52.299494Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:09:52.299616Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:09:52.300815Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:52.300964Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:09:52.302358Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T23:09:52.302480Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T23:09:52.302536Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T23:09:52.302826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:09:52.302871Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T23:09:52.302951Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:09:52.303069Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T23:09:52.303103Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T23:09:52.303132Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T23:09:52.303168Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:09:52.304031Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T23:09:52.304121Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T23:09:52.304231Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:09:52.304263Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:09:52.304300Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T23:09:52.304335Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:09:52.304719Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T23:09:52.304856Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T23:09:52.305069Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T23:09:52.305159Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T23:09:52.305903Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:09:52.305981Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T23:09:52.317356Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:09:52.317471Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:09:52.318034Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T23:09:52.318087Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T23:09:52.515535Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T23:09:52.515848Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T23:09:52.522956Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T23:09:52.523064Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:09:52.523458Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:09:52.523508Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:09:52.523621Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T23:09:52.523953Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:09:52.529551Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:09:52.530426Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:09:52.530483Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:09:52.530600Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:09:52.530680Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:09:52.546724Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:09:52.547258Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T23:09:52.548659Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.548714Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:09:52.548769Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:09:52.549000Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:09:52.549128Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:09:52.570711Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T23:09:52.570782Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:09:52.571137Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:09:52.571224Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... an for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:10:06.700528Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2024-11-21T23:10:06.700557Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2024-11-21T23:10:06.700598Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T23:10:06.700732Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:10:06.700766Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2024-11-21T23:10:06.700804Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:10:06.700844Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:10:06.700885Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:10:06.700905Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:10:06.700956Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2024-11-21T23:10:06.711928Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2024-11-21T23:10:06.712007Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2024-11-21T23:10:06.712056Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2024-11-21T23:10:06.963034Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T23:10:06.963109Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] TxId# 281474976715662 ProcessProposeKqpTransaction 2024-11-21T23:10:06.964723Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jd8ftvvw5ewydkhjef2r4xnc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDdjYTM5YTEtODhhM2RiODMtOThmYWRmZWMtZDdmZTVlMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:06.969957Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1068:2869], Recipient [3:634:2539]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T23:10:06.970125Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:10:06.970180Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2024-11-21T23:10:06.970216Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2024-11-21T23:10:06.970273Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2024-11-21T23:10:06.970373Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T23:10:06.970456Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:10:06.970502Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:10:06.970531Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:10:06.970578Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2024-11-21T23:10:06.970762Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T23:10:06.970793Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:10:06.970814Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:10:06.970838Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:10:06.970957Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T23:10:06.971216Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1068:2869], 0} after executionsCount# 1 2024-11-21T23:10:06.971266Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1068:2869], 0} sends rowCount# 2, bytes# 48, quota rows left# 999, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:10:06.971414Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1068:2869], 0} finished in read 2024-11-21T23:10:06.971487Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T23:10:06.971511Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:10:06.971532Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2024-11-21T23:10:06.971552Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:10:06.971595Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2024-11-21T23:10:06.971624Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:10:06.971649Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2024-11-21T23:10:06.971684Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:10:06.971791Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T23:10:06.974116Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1068:2869], Recipient [3:634:2539]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T23:10:06.974175Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2024-11-21T23:10:07.132295Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T23:10:07.132369Z node 3 :TX_PROXY DEBUG: actor# [3:52:2099] TxId# 281474976715663 ProcessProposeKqpTransaction 2024-11-21T23:10:07.133055Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8ftw42csyb7qjx1tdwfmqy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTg2ZWU2Y2YtM2ZmNWE3YS00ZDIyODEwZi02YzIzZTkwZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:07.143919Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1097:2892], Recipient [3:867:2702]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2024-11-21T23:10:07.144060Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2024-11-21T23:10:07.144120Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2024-11-21T23:10:07.144161Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2024-11-21T23:10:07.144232Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2024-11-21T23:10:07.144354Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T23:10:07.144395Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2024-11-21T23:10:07.144435Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T23:10:07.144470Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T23:10:07.144520Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2024-11-21T23:10:07.144569Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
2024-11-21T23:10:07.144593Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T23:10:07.144618Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2024-11-21T23:10:07.144647Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2024-11-21T23:10:07.144753Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2024-11-21T23:10:07.145115Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1097:2892], 0} after executionsCount# 1 2024-11-21T23:10:07.145203Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1097:2892], 0} sends rowCount# 2, bytes# 48, quota rows left# 32765, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:10:07.145359Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1097:2892], 0} finished in read 2024-11-21T23:10:07.145430Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T23:10:07.145458Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T23:10:07.145483Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:10:07.145508Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:10:07.145561Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2024-11-21T23:10:07.145586Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:10:07.145613Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2024-11-21T23:10:07.145654Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T23:10:07.145785Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T23:10:07.152784Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1097:2892], Recipient [3:867:2702]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T23:10:07.152882Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> YdbSdkSessionsPool::RunSmallPlan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 
2024-11-21T23:10:02.187098Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873786001484506:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:02.187137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001198/r3tmp/tmpJBHfkT/pdisk_1.dat 2024-11-21T23:10:02.792790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:02.792887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:02.796763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:02.825050Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4670, node 1 2024-11-21T23:10:02.860465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:10:02.866885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:02.867128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: Root, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:10:02.887172Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:10:02.887277Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:10:02.914419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:02.932333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:02.932404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:02.932495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:10:02.932577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:10:03.037308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:03.037332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:03.037341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:03.037426Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:03.447347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:03.460980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:03.461060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:03.464278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:10:03.464497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:10:03.464514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:10:03.468119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:03.469288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:10:03.469307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:10:03.471432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:03.475489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230603523, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:03.475527Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:10:03.475801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:10:03.477468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:03.477625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:03.477669Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:10:03.477743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2024-11-21T23:10:03.477791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:10:03.477829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:10:03.480494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:10:03.480547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:10:03.480617Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:10:03.480707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:10:07.187648Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873786001484506:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:07.187741Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |81.7%| [TA] {RESULT} $(B)/ydb/core/erasure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> YdbProxy::CreateTopic |81.7%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:09:02.041930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:09:02.042012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:02.042057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:09:02.042094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:09:02.042170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:09:02.042200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:09:02.042274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:02.042717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:02.136109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:09:02.136189Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:09:02.150375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:02.150906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:09:02.151132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:09:02.176195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:09:02.176539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:09:02.177142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:02.179290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:02.185765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:02.187305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:02.187372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:02.187461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:02.187515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:02.187557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:02.187858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:09:02.197163Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:09:02.363006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:02.363296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.363525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:02.363815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:02.363881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.366527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:02.366675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:02.366972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.367050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:02.367138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:02.367182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:02.368882Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.368930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:02.368961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:09:02.370744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.370798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.370853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:02.370903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:02.375132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:02.376698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:02.376870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:09:02.377702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:02.377810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:02.377846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:02.378076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:02.378117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:02.378263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:02.378343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:02.380293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:02.380341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:02.380540Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:02.380600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:02.380897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:02.380931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:02.381015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:02.381044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:02.381078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:02.381128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:02.381170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:02.381196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:02.381250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:02.381280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:02.381308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... ent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:10:09.463344Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:10:09.463525Z node 30 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 210us result status StatusSuccess 2024-11-21T23:10:09.464134Z node 30 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 
RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:10:09.475497Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:821:2657] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:10:09.475608Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:760:2657] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T23:10:09.475738Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:821:2657] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732230609453908 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732230609453908 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732230609453908 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:10:09.477762Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:821:2657] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T23:10:09.477875Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:760:2657] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> YdbSdkSessionsPool::StressTestSync10 [GOOD] >> Donor::SlayAfterWiping >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2024-11-21T23:09:34.850475Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.850516Z :DEBUG: [db] [sessionid] [cluster] New values: 
ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.850551Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.850961Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:34.865400Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.865552Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.866040Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:34.866525Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.866634Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:34.866713Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.866756Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:09:34.867567Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.867693Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.867715Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.868128Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:34.870571Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.870709Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.872539Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:34.873038Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.873686Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:34.873783Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.873845Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:09:34.875138Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.875164Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.875188Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.875530Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:34.876184Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.876328Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.876619Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (empty maybe) 2024-11-21T23:09:34.877484Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.877648Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:34.877722Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.877764Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:09:34.878821Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.878863Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.878904Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.879195Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:34.879831Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.879943Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.880190Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:34.881758Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.882229Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:34.882320Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.882361Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:09:34.883123Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.883141Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.883163Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.883419Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:34.883863Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.883940Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.884119Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:34.884418Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.884511Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:34.884579Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.884608Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 0 bytes 2024-11-21T23:09:34.885141Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.885157Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.885226Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.885476Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:34.885979Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.886095Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.886250Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:34.951138Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.951341Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:34.951436Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.951485Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:09:34.952671Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.952700Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.952733Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.953028Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:09:34.953572Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.953671Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.957078Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:34.957981Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.958216Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:34.958295Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.958351Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2024-11-21T23:09:34.963564Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.963592Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.970497Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:34.971009Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2024-11-21T23:09:34.971731Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:09:34.971906Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.974667Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:09:34.976291Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:34.976729Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:09:34.976828Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:09:34.976877Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2024-11-21T23:09:35.002280Z :ReadSession INFO: Random seed for debugging is 1732230575002239 2024-11-21T23:09:35.528696Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873672185789528:2079];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:35.533276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:35.987130Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:09:36.030780Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029e2/r3tmp/tmppuvHnW/pdisk_1.dat 2024-11-21T23:09:36.169703Z node 2 :METADATA_PROVIDER ERROR: fline=ac ... onsumer shared/user session shared/user_1_1_1635450116842652047_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 4 from offset3 2024-11-21T23:09:57.500012Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid aacf3a01-3fa3123c-9ea1070d-ebf1fe0f has messages 1 2024-11-21T23:09:57.500099Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 read done: guid# aacf3a01-3fa3123c-9ea1070d-ebf1fe0f, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 200 2024-11-21T23:09:57.590856Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2024-11-21T23:09:57.500123Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 response to read: guid# aacf3a01-3fa3123c-9ea1070d-ebf1fe0f 2024-11-21T23:09:57.500325Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 Process answer. Aval parts: 0 2024-11-21T23:09:57.505179Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 grpc read done: success# 1, data# { read { } } GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". 
Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2024-11-21T23:09:57.486000Z WriteTime: 2024-11-21T23:09:57.486000Z Ip: "ipv6:[::1]:42006" UncompressedSize: 8 Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:42006" } } } 2024-11-21T23:09:57.591074Z :DEBUG: [/Root] [/Root] [e68e9002-f1effc4a-2feee9fb-26764b5c] [dc1] Commit offsets [2, 3). Partition stream id: 1 2024-11-21T23:09:57.591455Z :DEBUG: [/Root] [/Root] [e68e9002-f1effc4a-2feee9fb-26764b5c] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:09:57.584589Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0] Write session: acknoledged message 1 2024-11-21T23:09:57.593285Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0] Write session will now close 2024-11-21T23:09:57.593349Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0] Write session: aborting 2024-11-21T23:09:57.594229Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:09:57.505259Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 got read request: guid# 2002f150-6f6b99f6-93bd27fd-4ceceee5 2024-11-21T23:09:57.591943Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2024-11-21T23:09:57.595981Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0] Write session is aborting and will not restart 2024-11-21T23:09:57.596601Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:09:57.606009Z :DEBUG: [/Root] [/Root] [e68e9002-f1effc4a-2feee9fb-26764b5c] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2024-11-21T23:09:57.592149Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2024-11-21T23:09:57.594543Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0 grpc read done: success: 0 data: 2024-11-21T23:09:57.594559Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0 grpc read failed 2024-11-21T23:09:57.594609Z node 1 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 4 sessionId: test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0 2024-11-21T23:09:57.594633Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0 is DEAD 2024-11-21T23:09:57.594914Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:09:57.605012Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session 
shared/user_1_1_1635450116842652047_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2024-11-21T23:09:57.605068Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2024-11-21T23:09:57.605135Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 2024-11-21T23:09:57.596648Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439873766675072953:2590] destroyed 2024-11-21T23:09:57.596710Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:09:57.596734Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:09:57.596856Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:09:57.596982Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_1635450116842652047_v1 2024-11-21T23:09:57.597096Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:09:57.604096Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:09:57.698628Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|5ae8dcd2-ffce394c-3c96affd-3d5f08bc_0] Write session: destroy 2024-11-21T23:09:57.604162Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:09:57.604223Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2024-11-21T23:09:59.215876Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:09:59.762850Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2024-11-21T23:10:04.230499Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:10:07.510505Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2024-11-21T23:10:07.714493Z :INFO: [/Root] [/Root] [e68e9002-f1effc4a-2feee9fb-26764b5c] Closing read session. 
Close timeout: 0.000000s 2024-11-21T23:10:07.714583Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2024-11-21T23:10:07.714647Z :INFO: [/Root] [/Root] [e68e9002-f1effc4a-2feee9fb-26764b5c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 17047 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:10:07.714818Z :NOTICE: [/Root] [/Root] [e68e9002-f1effc4a-2feee9fb-26764b5c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:10:07.714883Z :DEBUG: [/Root] [/Root] [e68e9002-f1effc4a-2feee9fb-26764b5c] [dc1] Abort session to cluster 2024-11-21T23:10:07.716036Z :NOTICE: [/Root] [/Root] [e68e9002-f1effc4a-2feee9fb-26764b5c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:10:07.722521Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 grpc closed 2024-11-21T23:10:07.722607Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_1635450116842652047_v1 is DEAD 2024-11-21T23:10:07.730512Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439873736610301388:2507] disconnected; active server actors: 1 2024-11-21T23:10:07.730578Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7439873736610301388:2507] client user disconnected session shared/user_1_1_1635450116842652047_v1 2024-11-21T23:10:07.730856Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:10:07.734898Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_1635450116842652047_v1 2024-11-21T23:10:07.734947Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439873736610301394:2510] destroyed 2024-11-21T23:10:07.738755Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_1635450116842652047_v1 2024-11-21T23:10:08.471689Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439873813919713807:2694] TxId: 281474976710721. Ctx: { TraceId: 01jd8ftx2pa97ne4v7b13bnwq3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAyMzE5MjYtYmFiNWZiNGQtNzZkMjhkYTEtYmRiYTgwNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2024-11-21T23:10:08.482258Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439873813919713818:2703], TxId: 281474976710721, task: 2. Ctx: { TraceId : 01jd8ftx2pa97ne4v7b13bnwq3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTAyMzE5MjYtYmFiNWZiNGQtNzZkMjhkYTEtYmRiYTgwNTE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439873813919713807:2694], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:10:08.483173Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439873813919713819:2704], TxId: 281474976710721, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTAyMzE5MjYtYmFiNWZiNGQtNzZkMjhkYTEtYmRiYTgwNTE=. TraceId : 01jd8ftx2pa97ne4v7b13bnwq3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439873813919713807:2694], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD] Test command err: 2024-11-21T23:10:05.450529Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873799650123934:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:05.451014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001181/r3tmp/tmpjxDkKi/pdisk_1.dat 2024-11-21T23:10:06.132615Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:06.136114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:06.136687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:06.142234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32634, node 1 2024-11-21T23:10:06.381248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:06.381274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:06.381280Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:06.381364Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:10:06.734155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:06.738828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:06.738870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:06.752634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:10:06.752890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:10:06.752921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:10:06.754932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:10:06.754965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:10:06.757754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:06.760012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:06.762643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230606806, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:06.762683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:10:06.762991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:10:06.765001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:06.765165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:06.765212Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:10:06.765283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:10:06.765317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:10:06.765361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:10:06.768622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:10:06.768661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:10:06.768681Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:10:06.768775Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 >> TPersQueueTest::DefaultMeteringMode [GOOD] >> TPersQueueTest::DisableWrongSettings >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ShardsLimit800 [GOOD] |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2024-11-21T23:08:32.845844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:32.845891Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:32.845969Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:32.857885Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:32.858280Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T23:08:32.858502Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:32.868270Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:32.911007Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:32.911577Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:32.913087Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T23:08:32.913169Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T23:08:32.913275Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T23:08:32.913584Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:32.960392Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T23:08:32.960581Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:32.960747Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T23:08:32.960799Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T23:08:32.960839Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T23:08:32.960882Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:32.961186Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2024-11-21T23:08:32.961263Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:32.961360Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T23:08:32.961491Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T23:08:32.961717Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:32.961764Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:32.961797Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T23:08:32.961829Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:32.961861Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:32.961889Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2024-11-21T23:08:32.961927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:33.006338Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.006423Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.006475Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T23:08:33.009022Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T23:08:33.009081Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:33.009162Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T23:08:33.009326Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T23:08:33.009375Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T23:08:33.009428Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T23:08:33.009465Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:33.009495Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T23:08:33.009527Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T23:08:33.009554Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:33.009866Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:33.009903Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T23:08:33.009933Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T23:08:33.009972Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 
2024-11-21T23:08:33.010024Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T23:08:33.010050Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T23:08:33.010076Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T23:08:33.010123Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:33.010159Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:33.035220Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T23:08:33.035297Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:33.035359Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:33.035406Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T23:08:33.035478Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:33.036058Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.036113Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:33.036158Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T23:08:33.036580Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T23:08:33.036657Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:33.036789Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:33.036828Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:33.036865Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T23:08:33.036935Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T23:08:33.040698Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T23:08:33.040771Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:33.040970Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.041006Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:33.041058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:33.041095Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:33.041147Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:33.041187Z node 1 :TX_DATASHARD DEBUG: Found 
ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T23:08:33.041232Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T23:08:33.041284Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:33.041317Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:33.041351Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:33.041382Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:33.041562Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T23:08:33.041598Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:33.041622Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:33.041642Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T23:08:33.041664Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T23:08:33.041725Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:33.041748Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T23:08:33.041780Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:33.041826Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:33.041884Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T23:08:33.041917Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T23:08:33.041957Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T23:08:33.042008Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:33.042044Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:33.042081Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit MakeS ... 
1 ms 2024-11-21T23:10:09.058508Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.058711Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.058744Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.058788Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.058822Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.059054Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.059086Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.059129Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.059161Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.059319Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.059352Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.059416Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.059454Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.059631Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.059666Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.059710Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.059743Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.059928Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.059963Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.060005Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.060040Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.060244Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.060297Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.060367Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.060406Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.060551Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.060583Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit 
CompleteOperation 2024-11-21T23:10:09.060630Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.060663Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.060862Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.060899Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.060945Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.060983Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.061189Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.061222Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.061268Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.061304Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.061558Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.061592Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.061639Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.061676Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.061903Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.061938Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.061987Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.062024Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.062273Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.062309Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.062356Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.062412Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.062593Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.062628Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.062674Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.062709Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2024-11-21T23:10:09.062921Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:09.062957Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2024-11-21T23:10:09.063011Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:97:2132], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:10:09.063045Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:09.063361Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2024-11-21T23:10:09.063414Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:09.063456Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2024-11-21T23:10:09.063605Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2024-11-21T23:10:09.063639Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:09.063672Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2024-11-21T23:10:09.063766Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2024-11-21T23:10:09.063798Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:09.063829Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2024-11-21T23:10:09.063910Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2024-11-21T23:10:09.063943Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:09.063991Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2024-11-21T23:10:09.064088Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2024-11-21T23:10:09.064119Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:09.064149Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2024-11-21T23:10:09.064233Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2024-11-21T23:10:09.064286Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:09.064319Z node 32 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2024-11-21T23:10:09.064405Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2024-11-21T23:10:09.064435Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:09.064474Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2024-11-21T23:10:09.064570Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:781:2717], Recipient [32:333:2306]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2024-11-21T23:10:09.064602Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:09.064630Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 30 22 28 30 30 31 21 29 22 31 27 27 31 26 29 27 31 27 26 31 31 2 11 3 29 - 2 29 - - - - actual 30 22 28 30 30 31 21 29 22 31 27 27 31 26 29 27 31 27 26 31 31 2 11 3 29 - 2 29 - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |81.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 12942229911659481160 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2024-11-21T23:10:09.015980Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:185:16] ServerId# [1:284:55] TabletId# 72057594037932033 PipeClientId# [3:185:16] 2024-11-21T23:10:09.016139Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:215:16] ServerId# [1:289:60] TabletId# 72057594037932033 PipeClientId# [8:215:16] 2024-11-21T23:10:09.016229Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:203:16] ServerId# [1:287:58] TabletId# 72057594037932033 PipeClientId# [6:203:16] 2024-11-21T23:10:09.016349Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:197:16] ServerId# [1:286:57] TabletId# 72057594037932033 PipeClientId# [5:197:16] 2024-11-21T23:10:09.016417Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:191:16] ServerId# [1:285:56] TabletId# 72057594037932033 PipeClientId# [4:191:16] 2024-11-21T23:10:09.016535Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:179:16] ServerId# [1:283:54] TabletId# 72057594037932033 PipeClientId# [2:179:16] 2024-11-21T23:10:09.016642Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:209:16] ServerId# 
[1:288:59] TabletId# 72057594037932033 PipeClientId# [7:209:16] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD] Test command err: 2024-11-21T23:09:57.079440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873764554548344:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:57.178869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011c3/r3tmp/tmph8yIlB/pdisk_1.dat 2024-11-21T23:09:58.142847Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:58.172261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:58.172362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:58.209595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:58.220868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 63849, node 1 2024-11-21T23:09:58.824233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:58.824267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:58.824274Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:58.824399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23213 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:09:59.310843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.316840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:59.316893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.328540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:59.328807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:59.328823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2024-11-21T23:09:59.335595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:59.335630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:09:59.336836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:59.343656Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.356228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230599393, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:59.356276Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:59.356575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:59.360063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:59.360247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:59.360308Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:59.360424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:59.360478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:59.360533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:59.364781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:59.365717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:59.372749Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:59.372897Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:10:02.034532Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873764554548344:2188];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:02.034624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] >> YdbProxy::DescribePath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2024-11-21T23:06:41.738702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872923821801064:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:06:41.739128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023d8/r3tmp/tmpN46ICk/pdisk_1.dat 2024-11-21T23:06:42.366867Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:06:42.401718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:06:42.401824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:06:42.408966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2169, node 1 2024-11-21T23:06:42.538478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:06:42.538499Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:06:42.538512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:06:42.538597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:06:42.986927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:06:43.030788Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:06:43.087425Z node 1 :TICKET_PARSER ERROR: Ticket **** (7F399AB7): System domain not available for user usage 2024-11-21T23:06:45.764119Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2024-11-21T23:06:45.767699Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872941001670723:2301], Start check tables existence, number paths: 2 2024-11-21T23:06:45.768000Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2024-11-21T23:06:45.768014Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2024-11-21T23:06:45.768025Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2024-11-21T23:06:45.770953Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2Q0Y2Q4ZjMtYjNkNTljZjctYmJmY2I3MDItYTJiNTRhNDk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id N2Q0Y2Q4ZjMtYjNkNTljZjctYmJmY2I3MDItYTJiNTRhNDk= 2024-11-21T23:06:45.771427Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872941001670723:2301], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2024-11-21T23:06:45.771464Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872941001670723:2301], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2024-11-21T23:06:45.771486Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7439872941001670723:2301], Successfully finished 2024-11-21T23:06:45.774933Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2024-11-21T23:06:45.775000Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=N2Q0Y2Q4ZjMtYjNkNTljZjctYmJmY2I3MDItYTJiNTRhNDk=, ActorId: [1:7439872941001670746:2303], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:45.794502Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872941001670749:2299], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:45.798717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:06:45.801177Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872941001670749:2299], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2024-11-21T23:06:45.804383Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872941001670749:2299], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2024-11-21T23:06:45.819970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872941001670749:2299], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:06:45.883634Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872941001670749:2299], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2024-11-21T23:06:45.887912Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439872941001670749:2299], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2024-11-21T23:06:45.901210Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA= 2024-11-21T23:06:45.901508Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA=, ActorId: [1:7439872941001670808:2304], ActorState: unknown state, session actor bootstrapped 2024-11-21T23:06:45.901638Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA=, ActorId: [1:7439872941001670808:2304], ActorState: ReadyState, TraceId: 01jd8fmqrddnwype2bm5xdkgkj, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7439872941001670807:2337] database: Root databaseId: /Root pool id: sample_pool_id 2024-11-21T23:06:45.901699Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:06:45.901712Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2024-11-21T23:06:45.901765Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7439872941001670808:2304], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA= 2024-11-21T23:06:45.901810Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872941001670810:2305], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:45.902861Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872941001670811:2306], Database: /Root, Start database fetching 2024-11-21T23:06:45.903250Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7439872941001670811:2306], Database: /Root, Database info successfully fetched, serverless: 0 2024-11-21T23:06:45.903289Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2024-11-21T23:06:45.903337Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439872941001670820:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA=, Start pool fetching 2024-11-21T23:06:45.903368Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872941001670821:2308], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:06:45.904057Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872941001670821:2308], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:45.904103Z node 
1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7439872941001670820:2307], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA=, Pool info successfully resolved 2024-11-21T23:06:45.904132Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA= 2024-11-21T23:06:45.904145Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2024-11-21T23:06:45.904433Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872941001670824:2309], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7439872941001670808:2304], session id: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA= 2024-11-21T23:06:45.904507Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7439872941001670824:2309], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2024-11-21T23:06:45.904546Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439872941001670810:2305], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2024-11-21T23:06:45.904568Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=OTNhMzBhN2MtYmVlZTU1ZTEtYTcyMjI2YTctYWQ0NGRkZTA= 2024-11-21T23:06:45.904603Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got create teables request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:06:45.904677Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service tables creation 2024-11-21T23:06:45.904865Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2024-11-21T23:06:45.904908Z node 1 ... 
rId: [6:7439873446993366108:2415], ActorState: ExecuteState, TraceId: 01jd8frawyba4znvm4h6kfhcem, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:08:43.904263Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OWU0MTVlNjQtNGVhNmY0MTAtMTc0NzlhYTctM2RhOGI0Yzg=, ActorId: [6:7439873446993366108:2415], ActorState: ExecuteState, TraceId: 01jd8frawyba4znvm4h6kfhcem, EndCleanup, isFinal: 1 2024-11-21T23:08:43.904321Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OWU0MTVlNjQtNGVhNmY0MTAtMTc0NzlhYTctM2RhOGI0Yzg=, ActorId: [6:7439873446993366108:2415], ActorState: ExecuteState, TraceId: 01jd8frawyba4znvm4h6kfhcem, Sent query response back to proxy, proxyRequestId: 16, proxyId: [6:7439873404043692170:2256] 2024-11-21T23:08:43.904366Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OWU0MTVlNjQtNGVhNmY0MTAtMTc0NzlhYTctM2RhOGI0Yzg=, ActorId: [6:7439873446993366108:2415], ActorState: unknown state, TraceId: 01jd8frawyba4znvm4h6kfhcem, Cleanup temp tables: 0 2024-11-21T23:08:43.904640Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=OWU0MTVlNjQtNGVhNmY0MTAtMTc0NzlhYTctM2RhOGI0Yzg=, ActorId: [6:7439873446993366108:2415], ActorState: unknown state, TraceId: 01jd8frawyba4znvm4h6kfhcem, Session actor destroyed 2024-11-21T23:08:43.916187Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: ReadyState, TraceId: 01jd8frb0be10wzk3ct5w8s94k, received request, proxyRequestId: 18 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; DROP RESOURCE POOL default; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2024-11-21T23:08:43.937461Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439873429813496434:2309], DatabaseId: /Root, PoolId: sample_pool_id, Got delete notification 2024-11-21T23:08:43.937578Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:08:43.937655Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439873446993366178:2431], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2024-11-21T23:08:43.938581Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439873446993366178:2431], DatabaseId: /Root, PoolId: sample_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2024-11-21T23:08:43.938674Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool sample_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2024-11-21T23:08:43.945100Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439873429813496635:2335], DatabaseId: /Root, PoolId: default, Got delete notification 2024-11-21T23:08:43.945204Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2024-11-21T23:08:43.945264Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439873446993366200:2432], DatabaseId: /Root, PoolId: default, Start pool fetching 2024-11-21T23:08:43.945529Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439873446993366200:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:43.945656Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:43.951324Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: ExecuteState, TraceId: 01jd8frb0be10wzk3ct5w8s94k, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [6:7439873446993366168:2304] WorkloadServiceCleanup: 0 2024-11-21T23:08:43.954033Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: CleanupState, TraceId: 01jd8frb0be10wzk3ct5w8s94k, EndCleanup, isFinal: 0 2024-11-21T23:08:43.954123Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: CleanupState, TraceId: 01jd8frb0be10wzk3ct5w8s94k, Sent query response back to proxy, proxyRequestId: 18, proxyId: [6:7439873404043692170:2256] Wait pool handlers 0.000027s: number handlers = 2 Wait pool handlers 1.000166s: number handlers = 2 Wait pool handlers 2.000306s: number handlers = 2 Wait pool handlers 3.004456s: number handlers = 2 Wait pool handlers 4.006482s: number handlers = 2 Wait pool handlers 5.010466s: number handlers = 2 2024-11-21T23:08:49.015632Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:08:49.015672Z node 6 :IMPORT WARN: Table profiles were not loaded Wait pool handlers 6.014570s: number handlers = 2 Wait pool handlers 7.015030s: number handlers = 2 Wait pool handlers 8.016970s: number handlers = 2 Wait pool handlers 9.017267s: number handlers = 2 Wait pool handlers 10.018476s: number handlers = 2 Wait pool handlers 11.018827s: number handlers = 2 Wait pool handlers 12.022479s: number handlers = 2 2024-11-21T23:08:56.813268Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439873429813496434:2309], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh Wait pool handlers 13.024080s: number handlers = 2 Wait pool handlers 14.024226s: number handlers = 2 Wait pool handlers 15.026489s: number handlers = 2 Wait pool handlers 16.030485s: number handlers = 2 Wait pool handlers 17.030774s: number handlers = 2 Wait pool handlers 18.030897s: number handlers = 2 Wait pool handlers 19.034489s: number handlers = 2 Wait pool handlers 20.034613s: number handlers = 2 Wait pool handlers 21.035687s: number handlers = 2 Wait pool handlers 22.037948s: number handlers = 2 Wait pool handlers 23.038788s: number handlers = 2 Wait pool handlers 24.040385s: number handlers = 2 Wait pool handlers 25.041819s: number handlers = 2 Wait pool handlers 26.046565s: number handlers = 2 Wait pool handlers 27.050518s: number handlers = 2 Wait pool handlers 28.051357s: number handlers = 2 Wait pool handlers 29.051721s: number handlers = 2 Wait pool handlers 30.052927s: number handlers = 2 Wait pool handlers 31.054486s: number handlers = 2 Wait pool handlers 32.058514s: number handlers = 2 Wait pool handlers 33.062484s: number handlers = 2 Wait pool handlers 34.062752s: number handlers = 2 Wait pool handlers 35.065221s: number handlers = 2 Wait pool handlers 36.066468s: number handlers = 2 Wait pool handlers 37.066829s: number handlers = 2 Wait pool handlers 38.073157s: number handlers = 2 Wait pool handlers 39.073286s: number handlers = 2 Wait pool handlers 40.073417s: number handlers = 2 Wait pool handlers 
41.073554s: number handlers = 2 Wait pool handlers 42.076735s: number handlers = 2 Wait pool handlers 43.079596s: number handlers = 2 Wait pool handlers 44.081796s: number handlers = 2 Wait pool handlers 45.082783s: number handlers = 2 Wait pool handlers 46.084464s: number handlers = 2 Wait pool handlers 47.084906s: number handlers = 2 Wait pool handlers 48.086483s: number handlers = 2 Wait pool handlers 49.088169s: number handlers = 2 Wait pool handlers 50.090472s: number handlers = 2 Wait pool handlers 51.095671s: number handlers = 2 Wait pool handlers 52.095865s: number handlers = 2 Wait pool handlers 53.098886s: number handlers = 2 Wait pool handlers 54.099038s: number handlers = 2 Wait pool handlers 55.099217s: number handlers = 2 Wait pool handlers 56.102485s: number handlers = 2 Wait pool handlers 57.102622s: number handlers = 2 Wait pool handlers 58.106494s: number handlers = 2 Wait pool handlers 59.106657s: number handlers = 2 Wait pool handlers 60.115087s: number handlers = 2 Wait pool handlers 61.118489s: number handlers = 2 Wait pool handlers 62.118620s: number handlers = 2 Wait pool handlers 63.118861s: number handlers = 2 Wait pool handlers 64.122473s: number handlers = 2 Wait pool handlers 65.122692s: number handlers = 2 Wait pool handlers 66.123404s: number handlers = 2 Wait pool handlers 67.127932s: number handlers = 2 Wait pool handlers 68.130491s: number handlers = 2 Wait pool handlers 69.132564s: number handlers = 2 Wait pool handlers 70.134959s: number handlers = 2 Wait pool handlers 71.138472s: number handlers = 2 Wait pool handlers 72.142477s: number handlers = 2 Wait pool handlers 73.142892s: number handlers = 2 Wait pool handlers 74.143306s: number handlers = 2 Wait pool handlers 75.143625s: number handlers = 2 Wait pool handlers 76.146480s: number handlers = 2 Wait pool handlers 77.146959s: number handlers = 2 Wait pool handlers 78.149585s: number handlers = 2 Wait pool handlers 79.150120s: number handlers = 2 Wait pool handlers 80.152623s: number handlers = 2 Wait pool handlers 81.154493s: number handlers = 2 Wait pool handlers 82.162495s: number handlers = 2 Wait pool handlers 83.166492s: number handlers = 2 Wait pool handlers 84.170504s: number handlers = 2 Wait pool handlers 85.171948s: number handlers = 2 2024-11-21T23:10:09.546995Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439873429813496635:2335], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2024-11-21T23:10:09.547193Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7439873429813496434:2309], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2024-11-21T23:10:09.547330Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 2024-11-21T23:10:09.547356Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2024-11-21T23:10:10.148228Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: ReadyState, Session closed due to explicit close event 2024-11-21T23:10:10.148304Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: ReadyState, Cleanup start, isFinal: 1 
CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:10:10.148338Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: ReadyState, EndCleanup, isFinal: 1 2024-11-21T23:10:10.148370Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: unknown state, Cleanup temp tables: 0 2024-11-21T23:10:10.148449Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NmQ2YjRmZjEtZTdmYTRlMC00MDllN2ExMi02MzFjOGUw, ActorId: [6:7439873429813496345:2304], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: 2024-11-21T23:09:38.451679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:38.452315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:38.452495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:38.453256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:38.453453Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:38.453571Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c39/r3tmp/tmpiJRsd3/pdisk_1.dat 2024-11-21T23:09:38.996559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12183, node 1 TClient is connected to server localhost:10900 2024-11-21T23:09:39.778332Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:39.778411Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:39.778443Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:39.778802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:48.874601Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:09:48.890998Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:48.891471Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:48.892115Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:48.893853Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:48.894171Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c39/r3tmp/tmpFvIjCw/pdisk_1.dat 2024-11-21T23:09:49.255251Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9345, node 3 TClient is connected to server localhost:20636 2024-11-21T23:09:49.770584Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:49.770633Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:49.770678Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:49.771097Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:59.373931Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:59.376580Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:59.376958Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:59.377999Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:59.378435Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:59.378750Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c39/r3tmp/tmpYxO5JI/pdisk_1.dat 2024-11-21T23:09:59.777394Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26398, node 5 TClient is connected to server localhost:6943 2024-11-21T23:10:00.193274Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:00.193325Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:00.193352Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:00.194239Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:09.464991Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:09.465477Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:09.465598Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:09.466983Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:09.467611Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:09.467778Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c39/r3tmp/tmp73xKDd/pdisk_1.dat 2024-11-21T23:10:09.860368Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6791, node 7 TClient is connected to server localhost:15687 2024-11-21T23:10:10.325926Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:10.325980Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:10.326007Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:10.326163Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 10004393814961602521 SEND TEvPut with key [1:1:1:0:0:100:0] 2024-11-21T23:10:10.278023Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2024-11-21T23:10:10.278308Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2024-11-21T23:10:10.352177Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] Test command err: 2024-11-21T23:09:36.937775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:36.943360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:36.943739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:36.944792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:36.945022Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:36.945155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002cc2/r3tmp/tmpj3LGii/pdisk_1.dat 2024-11-21T23:09:37.388630Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2444, node 1 TClient is connected to server localhost:27621 2024-11-21T23:09:37.842308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:37.842372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:37.842590Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:37.843086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:46.517684Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found }
2024-11-21T23:09:46.531061Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:46.531462Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:46.531985Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:46.533046Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:46.533330Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002cc2/r3tmp/tmpCYE91O/pdisk_1.dat 2024-11-21T23:09:46.930880Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12535, node 3 TClient is connected to server localhost:27977 2024-11-21T23:09:47.562651Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:47.562734Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:47.562798Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:47.563301Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-9a33-70fb" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-9a33-d6d1" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-3" reason: "YELLOW-9a33-e9e2-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-d6d1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-9a33-258e-ab18" type: "STORAGE" level: 2 } issue_log { id: "RED-9a33-258e-ab18" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-9a33-819b-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-9a33-99d2-3-2147483648-3-55-0-55" status: RED message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "RED-8ac8-3-3-42" reason: "RED-8ac8-3-3-43" reason: "RED-8ac8-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "RED-8ac8-3-3-42" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 
host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/dl5p/002cc2/r3tmp/tmpCYE91O/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-43" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/dl5p/002cc2/r3tmp/tmpCYE91O/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-44" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/dl5p/002cc2/r3tmp/tmpCYE91O/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-9a33-819b-2147483648" status: RED message: "Group failed" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-9a33-99d2-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2024-11-21T23:09:56.644739Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:637:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:56.645348Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:56.645437Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:56.645829Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:635:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:56.646135Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:56.646301Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002cc2/r3tmp/tmp4Np71J/pdisk_1.dat 2024-11-21T23:09:57.162663Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21231, node 5 TClient is connected to server localhost:9237 2024-11-21T23:09:57.798469Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:57.798546Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:57.798593Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:57.799040Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:03.642004Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:287:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:03.642263Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:03.642422Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002cc2/r3tmp/tmp3taUv5/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18786, node 7 TClient is connected to server localhost:26370 2024-11-21T23:10:09.458096Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:200:2185], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:09.458447Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:09.458702Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002cc2/r3tmp/tmpmxV9kK/pdisk_1.dat 2024-11-21T23:10:09.899072Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13585, node 8 TClient is connected to server localhost:8163 2024-11-21T23:10:10.528048Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:10.528118Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:10.528169Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:10.529005Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::NoStoragePools [GOOD] >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> YdbProxy::ReadTopic >> YdbProxy::CreateTable >> YdbProxy::ListDirectory >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] >> YdbSdkSessionsPool::StressTestSync1 [FAIL] >> YdbProxy::DropTable >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> YdbProxy::CopyTable >> TVersions::Wreck0Reverse [GOOD] >> TYardTest::TestLogWriteCutEqualRandomWait [GOOD] >> TYardTest::TestLogWriteCutUnequal >> YdbProxy::RemoveDirectory ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 15777061018572834589 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2024-11-21T23:10:10.879978Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction >> Donor::SlayAfterWiping [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 8175261350302149202 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2024-11-21T23:10:11.015446Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:6308:827] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 
Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0]
Start compaction
Finish compaction
------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD]
Test command err: 2024-11-21T23:09:36.917022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found }
2024-11-21T23:09:36.932292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:36.933049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:36.933280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:36.935527Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:36.935605Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c28/r3tmp/tmpgTE3I3/pdisk_1.dat 2024-11-21T23:09:37.383667Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32633, node 1 TClient is connected to server localhost:10504 2024-11-21T23:09:37.861098Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:37.861164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:37.861211Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:37.862074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:46.150444Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found }
2024-11-21T23:09:46.165658Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:46.166048Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:46.166687Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:46.168254Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:46.168559Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c28/r3tmp/tmpUwzkXy/pdisk_1.dat 2024-11-21T23:09:46.520239Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24211, node 3 TClient is connected to server localhost:18041 2024-11-21T23:09:47.200939Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:47.201007Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:47.201082Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:47.201571Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:55.511878Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:55.512756Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:55.513136Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:55.514229Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:55.527551Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:55.528035Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c28/r3tmp/tmp6u6fUf/pdisk_1.dat 2024-11-21T23:09:56.093611Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8021, node 5 TClient is connected to server localhost:15433 2024-11-21T23:09:56.657722Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:56.657792Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:56.657838Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:56.658301Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:04.961481Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:04.961875Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:04.961987Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:04.962620Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:04.963001Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:04.963103Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c28/r3tmp/tmpdeET1A/pdisk_1.dat 2024-11-21T23:10:05.410707Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15397, node 7 TClient is connected to server localhost:23395 2024-11-21T23:10:05.918673Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:05.918749Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:05.918800Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:05.919024Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:11.261626Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:11.261919Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:11.262052Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c28/r3tmp/tmp28DBPh/pdisk_1.dat 2024-11-21T23:10:11.649021Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25839, node 9 TClient is connected to server localhost:63769 2024-11-21T23:10:12.192870Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:12.192933Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:12.192970Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:12.193358Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> YdbProxy::MakeDirectory ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2024-11-21T23:09:33.715183Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1732230573715156 2024-11-21T23:09:34.429124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873666778115836:2063];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:34.447621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:34.583806Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873664873537699:2091];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:34.585037Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:34.926452Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:09:34.939183Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002a05/r3tmp/tmpNNJRgJ/pdisk_1.dat 2024-11-21T23:09:35.521047Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:35.536534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:35.573004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:35.573112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:35.583025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:35.583085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T23:09:35.585768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:35.586447Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:09:35.588675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18977, node 1 2024-11-21T23:09:35.886980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002a05/r3tmp/yandexO2TAJd.tmp 2024-11-21T23:09:35.887008Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002a05/r3tmp/yandexO2TAJd.tmp 2024-11-21T23:09:35.887161Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002a05/r3tmp/yandexO2TAJd.tmp 2024-11-21T23:09:35.887289Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:35.987208Z INFO: TTestServer started on Port 4747 GrpcPort 18977 TClient is connected to server localhost:4747 PQClient connected to localhost:18977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:36.628229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:09:36.750130Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:09:36.813245Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2024-11-21T23:09:39.392225Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873666778115836:2063];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:39.392341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:39.586166Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873664873537699:2091];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:39.586220Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:40.246667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873692547920618:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.246672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873690643341757:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.246786Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873690643341744:2283], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.246882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.254513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.258012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873692547920630:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.295037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T23:09:40.366752Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873690643341762:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T23:09:40.385921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T23:09:40.386153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873692547920632:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T23:09:40.672050Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873690643341805:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:40.673965Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGYwNTlkNDItNzg2YThiOC0xYTZjNGMzNy01MTM0Zjg5MQ==, ActorId: [2:7439873690643341722:2281], ActorState: ExecuteState, TraceId: 01jd8ft1ze4fmh8hc763bbfvj2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:40.672927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:09:40.675705Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873692547920738:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:40.677135Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGM1NjU1YjctNmIwMjQyNTQtNGY1ZjVmNTEtODJmYzI1MjE=, ActorId: [1:7439873692547920615:2304], ActorState: ExecuteState, TraceId: 01jd8ft1z51x8befh5e1jg73x4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:40.699239Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:40.699428Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:40.949533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:09:41.258634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: ... ghtTotal: 0 MessagesInflight: 0 } 2024-11-21T23:10:09.656061Z :INFO: [/Root] [/Root] [4cfce181-21f631cd-ec94d8d4-fd7aec23] Closing read session. Close timeout: 0.000000s 2024-11-21T23:10:09.656105Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2024-11-21T23:10:09.656145Z :INFO: [/Root] [/Root] [4cfce181-21f631cd-ec94d8d4-fd7aec23] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1633 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:10:09.656278Z :NOTICE: [/Root] [/Root] [4cfce181-21f631cd-ec94d8d4-fd7aec23] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:10:09.657288Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_4121440006770801301_v1 grpc read done: success# 1, data# { read { } } 2024-11-21T23:10:09.657385Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_4121440006770801301_v1 grpc closed 2024-11-21T23:10:09.657516Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_4121440006770801301_v1 is DEAD 2024-11-21T23:10:09.658845Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:10:09.658641Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439873811709791212:2491] disconnected; active server actors: 1 2024-11-21T23:10:09.658683Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439873811709791212:2491] client user disconnected session shared/user_3_1_4121440006770801301_v1 2024-11-21T23:10:09.658878Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_4121440006770801301_v1 2024-11-21T23:10:09.658907Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439873811709791215:2494] destroyed 2024-11-21T23:10:09.658958Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_4121440006770801301_v1 2024-11-21T23:10:10.373472Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439873820299726019:2526] TxId: 281474976710694. Ctx: { TraceId: 01jd8ftyv14cft8pfk2agb8c12, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTczOTYzY2ItMmRjNjlkMWEtNzg0NmI5NjMtODg4ZWM3ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2024-11-21T23:10:10.373843Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439873820299726031:2535], TxId: 281474976710694, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZTczOTYzY2ItMmRjNjlkMWEtNzg0NmI5NjMtODg4ZWM3ZWM=. TraceId : 01jd8ftyv14cft8pfk2agb8c12. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439873820299726019:2526], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:10:10.374161Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439873820299726032:2536], TxId: 281474976710694, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01jd8ftyv14cft8pfk2agb8c12. SessionId : ydb://session/3?node_id=3&id=ZTczOTYzY2ItMmRjNjlkMWEtNzg0NmI5NjMtODg4ZWM3ZWM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439873820299726019:2526], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:10:11.670620Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.670675Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.670712Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:10:11.671103Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:10:11.672289Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:10:11.675186Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.676327Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2024-11-21T23:10:11.678593Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.678631Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.678673Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:10:11.679320Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:10:11.679798Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:10:11.680384Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.680658Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:10:11.681887Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:10:11.683619Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2024-11-21T23:10:11.683721Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2024-11-21T23:10:11.683867Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:10:11.683910Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2024-11-21T23:10:11.683939Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2024-11-21T23:10:11.683993Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2024-11-21T23:10:11.686437Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.686473Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.686502Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:10:11.698706Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:10:11.710707Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:10:11.710937Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.711394Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:10:11.712304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.712503Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:10:11.712617Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:10:11.712680Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:10:11.712766Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2024-11-21T23:10:11.714829Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.714866Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.714906Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:10:11.715336Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:10:11.735350Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:10:11.735677Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:11.738728Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (empty maybe) 2024-11-21T23:10:11.739573Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:10:11.740021Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2024-11-21T23:10:11.740295Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2024-11-21T23:10:11.740389Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:10:11.740453Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:10:11.740495Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2024-11-21T23:10:11.740662Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2024-11-21T23:10:11.740696Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2024-11-21T23:10:13.744608Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:13.744664Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:13.744715Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:10:13.745125Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2024-11-21T23:10:13.745621Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2024-11-21T23:10:13.745765Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:13.747047Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:13.747729Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2024-11-21T23:10:13.747877Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2024-11-21T23:10:13.748058Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> BSCRestartPDisk::RestartOneByOne ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 3990913119447027137 2024-11-21T23:10:12.804882Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:12.809249Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16292141781824733179] 2024-11-21T23:10:12.832322Z 1 00h01m14.511024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> DataShardSnapshots::VolatileSnapshotSplit >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> DataShardSnapshots::LockedWriteReuseAfterCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.29001 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:38.739713Z 00000.008 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.016 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.016 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.016 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted Counters: Active:5748446/8388608, Passive:131, MemLimit:0 Counters: Active:5748446/8388608, Passive:131, MemLimit:104857600 Counters: Active:5748446/6291456, Passive:131, MemLimit:6291456 00001.473 
II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.486 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 121 reqs hit {0 0b} miss {242 21422038b} 00001.486 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.486 II| FAKE_ENV: DS.0 gone, left {4715b, 8}, put {30358b, 308} 00001.487 II| FAKE_ENV: DS.1 gone, left {47655250b, 148}, put {47735244b, 314} 00001.524 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.524 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.524 II| FAKE_ENV: All BS storage groups are stopped 00001.524 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00001.529 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:40.279548Z 00000.008 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.012 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.013 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.013 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted Counters: Active:5748446/8388608, Passive:131, MemLimit:0 00000.781 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 104857600 ReplacementPolicy: ThreeLeveledLRU Counters: Active:5748446/8388608, Passive:131, MemLimit:0 00000.781 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 2097152 ReplacementPolicy: ThreeLeveledLRU Counters: Active:5748446/2097152, Passive:131, MemLimit:0 00000.782 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 0 ReplacementPolicy: ThreeLeveledLRU Counters: Active:1028127/0, Passive:131, MemLimit:1048576 00000.782 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.793 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 121 reqs hit {0 0b} miss {242 21422038b} 00000.793 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.793 II| FAKE_ENV: DS.0 gone, left {4715b, 8}, put {30358b, 308} 00000.793 II| FAKE_ENV: DS.1 gone, left {47655250b, 148}, put {47735244b, 314} 00000.817 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.825 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.826 II| FAKE_ENV: All BS storage groups are stopped 00000.826 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.830 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:41.124797Z 00000.005 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.009 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.009 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.009 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted Counters: Active:8311925/8388608, Passive:131, 
MemLimit:0 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:8311925/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312827/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 Counters: Active:5747446/8388608, Passive:131, MemLimit:8388608 00001.197 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.206 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 651 reqs hit {0 0b} miss {654 57705781b} 00001.207 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.207 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00001.207 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00001.209 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.209 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.209 II| FAKE_ENV: All BS storage groups are stopped 00001.209 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00001.212 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:42.352275Z 00000.008 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.014 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: S3FIFO ...compacting ...waiting until compacted Counters: Active:8313376/8388608, Passive:131, MemLimit:0 Counters: Active:8313827/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 Counters: Active:8313376/8388608, Passive:131, MemLimit:8388608 Counters: Active:8313827/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8313827/8388608, Passive:131, MemLimit:8388608 00001.342 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.350 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 450 reqs hit {0 0b} miss {453 39564846b} 00001.351 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.351 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00001.351 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00001.353 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.353 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.353 II| FAKE_ENV: All BS storage groups are stopped 00001.353 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00001.356 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:43.734808Z 00000.011 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.011 II| FAKE_ENV: Starting storage for BS group 0 00000.012 II| FAKE_ENV: Starting storage for BS group 1 00000.012 II| FAKE_ENV: Starting storage for BS group 2 00000.012 II| FAKE_ENV: Starting storage for BS group 3 00000.015 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ClockPro 00000.015 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ClockPro 00000.015 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ClockPro 
...compacting ...waiting until compacted Counters: Active:8210829/8388608, Passive:131, MemLimit:0 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312023/8388608, Passive:131, MemLimit:8388608 Counters: Active:8312474/8388608, Passive:131, MemLimit:8388608 00001.381 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.401 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 651 reqs hit {0 0b} miss {654 57297397b} 00001.403 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.403 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10315b, 111} 00001.403 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10308509b, 109} 00001.413 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.413 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.413 II| FAKE_ENV: All BS storage groups are stopped 00001.413 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.000s 00001.418 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 18}, stopped 00000.000 II| FAKE_ENV: Born at 2024-11-21T23:06:45.205623Z 00000.013 II| TABLET_SAUSAGECACHE: Bootstrapped with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.013 II| FAKE_ENV: Starting storage for BS group 0 00000.014 II| FAKE_ENV: Starting storage for BS group 1 00000.014 II| FAKE_ENV: Starting storage for BS group 2 00000.014 II| FAKE_ENV: Starting storage for BS group 3 00000.017 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.017 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU 00000.017 II| TABLET_SAUSAGECACHE: Replacement policy switch from S3FIFO to ThreeLeveledLRU ...compacting ...waiting until compacted 00000.258 II| TABLET_SAUSAGECACHE: Config updated MemoryLimit: 8388608 ReplacementPolicy: S3FIFO 00000.258 II| TABLET_SAUSAGECACHE: Replacement policy switch from ThreeLeveledLRU to S3FIFO 00000.258 II| TABLET_SAUSAGECACHE: Replacement policy switch from ThreeLeveledLRU to S3FIFO 00000.263 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.264 II| TABLET_SAUSAGECACHE: Page collection cache gone, serviced 12 reqs hit {0 0b} miss {13 616620b} 00000.264 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.264 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10191b, 107} 00000.264 II| FAKE_ENV: DS.1 gone, left {10257096b, 5}, put {10305919b, 107} 00000.266 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.266 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.266 II| FAKE_ENV: All BS storage groups are stopped 00000.266 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.266 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] Test command err: 2024-11-21T23:09:27.681113Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873638148993875:2251];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:27.681167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019d0/r3tmp/tmp1OO6zW/pdisk_1.dat 2024-11-21T23:09:28.394271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:28.394371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:28.398273Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:28.415048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24209, node 1 2024-11-21T23:09:28.591252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:28.591270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:28.591300Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:28.591411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25828 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:28.915848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:28.921161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:28.921261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:09:28.923558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:28.923887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:28.923902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:09:28.931950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:28.932578Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:28.932596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:09:28.935397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:28.943964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230568985, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:28.944012Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:28.944257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:28.946322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:28.946504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:28.946552Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:28.946635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:28.946670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:28.946721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:28.950045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:28.950086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:28.950103Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:28.950209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:09:32.127112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873655328863882:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:32.139726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:32.163051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:32.163498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:09:32.163983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:32.164001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:09:32.170723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table 2024-11-21T23:09:32.170897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:32.171077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:32.171134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:09:32.175760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:32.175808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:32.175825Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:09:32.176715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:32.176739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:32.176750Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 1 2024-11-21T23:09:32.194642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:09:32.232587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:09:32.232689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 2 -> 3 2024-11-21T23:09:32.247437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:09:32.331273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976710658:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:09:32.331304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:09:32.331387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
281474976710658:0 3 -> 128 2024-11-21T23:09:32.336713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:09:32.344160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230572387, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:32.344212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976710658:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732230572387 2024-11-21T23:09:32.344374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 129 2024-11-21T23:09:32.346669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:32.347002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:32.347056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976710658:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:09:32.349700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:09:32.349734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:09:32.349749Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId ... 11.244003Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:11.244187Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:11.244407Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:11.245852Z node 13 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037910 Status: COMPLETE TxId: 281474976715765 Step: 1732230611230 OrderId: 281474976715765 ExecLatency: 47 ProposeLatency: 57 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037910 CpuTimeUsec: 6198 } } 2024-11-21T23:10:11.246050Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:11.246212Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:11.246932Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:11.247400Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:11.248148Z node 13 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715765:2 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:10:11.248185Z node 13 :FLAT_TX_SCHEMESHARD 
INFO: all shard schema changes has been received, operationId: 281474976715765:2, at schemeshard: 72057594046644480 2024-11-21T23:10:11.248216Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715765:2 129 -> 240 2024-11-21T23:10:11.251257Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715765:2 ProgressState 2024-11-21T23:10:11.251405Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715765:2 progress is 3/3 2024-11-21T23:10:11.251479Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:0 2024-11-21T23:10:11.251619Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:1 2024-11-21T23:10:11.251638Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715765:2 2024-11-21T23:10:11.258138Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715765 2024-11-21T23:10:11.262071Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 281474976715762, at schemeshard: 72057594046644480 2024-11-21T23:10:11.264465Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TDropLock Propose: opId# 281474976715766:0, path# /Root/table 2024-11-21T23:10:11.264636Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715766:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:11.267689Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715766, database: /Root, subject: , status: StatusAccepted, operation: DROP LOCK, path: /Root/table 2024-11-21T23:10:11.267827Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976715766, status# StatusAccepted 2024-11-21T23:10:11.268064Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976715766:0 ProgressState 2024-11-21T23:10:11.271940Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715766, at schemeshard: 72057594046644480 2024-11-21T23:10:11.274103Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230611321, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:11.274151Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDropLock TPropose opId# 281474976715766:0 HandleReply TEvOperationPlan: step# 1732230611321 2024-11-21T23:10:11.274171Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715766:0 128 -> 240 2024-11-21T23:10:11.276703Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715766:0 ProgressState 2024-11-21T23:10:11.276862Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715766:0 progress is 1/1 2024-11-21T23:10:11.276945Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715766:0 2024-11-21T23:10:11.280945Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715766 2024-11-21T23:10:11.291189Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976715762 2024-11-21T23:10:11.862974Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7439873824641317180:2439] [0] Resolve database: name# /Root 2024-11-21T23:10:11.866027Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7439873824641317180:2439] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:10:11.866072Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7439873824641317180:2439] [0] Send request: schemeShardId# 72057594046644480 2024-11-21T23:10:11.874686Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7439873824641317180:2439] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:22887" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1732230610 } EndTime { seconds: 1732230611 } } 2024-11-21T23:10:11.896035Z node 13 :TX_PROXY DEBUG: actor# [13:7439873790281574962:2138] Handle TEvNavigate describe path /Root/table/byValue/indexImplTable 2024-11-21T23:10:11.896087Z node 13 :TX_PROXY DEBUG: Actor# [13:7439873824641317186:5137] HANDLE EvNavigateScheme /Root/table/byValue/indexImplTable 2024-11-21T23:10:11.896276Z node 13 :TX_PROXY DEBUG: Actor# [13:7439873824641317186:5137] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T23:10:11.896401Z node 13 :TX_PROXY DEBUG: Actor# [13:7439873824641317186:5137] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table/byValue/indexImplTable" Options { ReturnBoundaries: true ShowPrivateTable: true } 2024-11-21T23:10:11.897714Z node 13 :TX_PROXY DEBUG: Actor# [13:7439873824641317186:5137] Handle TEvDescribeSchemeResult Forward to# [13:7439873824641317184:2440] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 693 Record# Status: StatusSuccess Path: "/Root/table/byValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715764 CreateStep: 1732230611111 ParentPathId: 8 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "Value" Type: "Decimal(22,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 22 DecimalScale: 9 } IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Value" KeyColumnNames: "Key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 10 MaxPartitionsCount: 10 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 2 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 12 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> DataShardSnapshots::LockedWriteBulkUpsertConflict |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> YdbProxy::AlterTable [GOOD] >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed |81.8%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> KqpErrors::ResolveTableError >> YdbOlapStore::BulkUpsert [GOOD] >> YdbOlapStore::DuplicateRows >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage |81.9%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2024-11-21T23:10:11.887773Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873823661420282:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:11.888484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001d3f/r3tmp/tmpb39m7K/pdisk_1.dat 2024-11-21T23:10:12.358495Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:12.375779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:12.375912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:12.380675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20614 TServer::EnableGrpc on GrpcPort 28879, node 1 2024-11-21T23:10:12.651651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:12.651677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:12.651686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:12.651806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:13.031033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
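[Editor's note] The FLAT_TX_SCHEMESHARD output in the surrounding test logs repeatedly advances each sub-operation through numeric states ("Change state for txid ... 2 -> 3", "3 -> 128", "128 -> 240") and declares the transaction finished once every part reports "progress is N/N". As a purely illustrative sketch of that bookkeeping — hypothetical names, not the schemeshard code, and 240 is only used here because it is the terminal state visible in the log — per-part progress tracking can be modeled like this:

    // Illustrative sketch only: tracking per-part states of a multi-part schema
    // transaction, echoing the "Change state for txid <id>:<part> A -> B" and
    // "progress is k/n" lines in the log above. Hypothetical names.
    #include <cstdint>
    #include <cstdio>
    #include <unordered_map>
    #include <vector>

    class TTxProgressTracker {
    public:
        static constexpr int DoneState = 240;  // terminal state seen in the log

        void RegisterTx(uint64_t txId, size_t parts, int initialState) {
            Parts_[txId].assign(parts, initialState);
        }

        // Throws if the part index is out of range for the registered tx.
        void AdvancePart(uint64_t txId, size_t part, int newState) {
            int& state = Parts_[txId].at(part);
            std::printf("Change state for txid %llu:%zu %d -> %d\n",
                        (unsigned long long)txId, part, state, newState);
            state = newState;
            ReportProgress(txId);
        }

    private:
        void ReportProgress(uint64_t txId) const {
            const auto& parts = Parts_.at(txId);
            size_t done = 0;
            for (int s : parts) {
                if (s == DoneState) {
                    ++done;
                }
            }
            std::printf("progress is %zu/%zu\n", done, parts.size());
            if (done == parts.size()) {
                std::printf("Operation and all the parts are done, txid %llu\n",
                            (unsigned long long)txId);
            }
        }

        std::unordered_map<uint64_t, std::vector<int>> Parts_;
    };

The real schemeshard drives these transitions from tablet events (TEvOperationPlan, TEvSchemaChanged, publication acks); the sketch only captures the state/progress accounting that the log lines report.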
2024-11-21T23:10:15.969638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:10:16.203014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> BSCRestartPDisk::RestartNotAllowed >> KqpErrors::ProposeError >> KqpErrors::ProposeResultLost_RwTx >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic >> BSCRestartPDisk::RestartOneByOneWithReconnects >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 7375600012614110012 2024-11-21T23:10:18.191154Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.191363Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.191474Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.191545Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.191622Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.191703Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.191809Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.192866Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.192964Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.193020Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.193085Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason 
Marker# BSVSF03 2024-11-21T23:10:18.193150Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.193203Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.193252Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.193355Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.193409Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.193444Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.193517Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.193550Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.193605Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.193663Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:18.195536Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.195626Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.195681Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.195764Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.195818Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.195867Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:18.195916Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> 
RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut >> YdbProxy::MakeDirectory [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> YdbProxy::CopyTable [GOOD] >> YdbProxy::OAuthToken >> YdbProxy::CopyTables |81.9%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/ydb-public-sdk-cpp-client-ydb_topic-ut |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup >> YdbProxy::DescribeConsumer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] Test command err: 2024-11-21T23:09:37.521867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:09:37.540395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:37.541266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:37.541526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:37.544133Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:37.544223Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c47/r3tmp/tmpCGrzu8/pdisk_1.dat 2024-11-21T23:09:38.032119Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7271, node 1 TClient is connected to server localhost:8555 2024-11-21T23:09:38.600950Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:38.601008Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:38.601051Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:38.601397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:48.117120Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:09:48.135073Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:48.135372Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:48.138069Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:48.139637Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:48.139970Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c47/r3tmp/tmpuVTEWV/pdisk_1.dat 2024-11-21T23:09:48.783812Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4830, node 3 TClient is connected to server localhost:32065 2024-11-21T23:09:49.358209Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:49.358324Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:49.358381Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:49.358885Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:58.777456Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:58.778090Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:58.778375Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:58.782318Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:58.782712Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:58.783019Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c47/r3tmp/tmpd4q24u/pdisk_1.dat 2024-11-21T23:09:59.204753Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12728, node 5 TClient is connected to server localhost:3282 2024-11-21T23:09:59.858840Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:59.858912Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:59.858949Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:59.859394Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T23:10:10.867194Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:10.867694Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:10.867834Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:10.868709Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:10.869220Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:10.869352Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c47/r3tmp/tmpagzclv/pdisk_1.dat 2024-11-21T23:10:11.368994Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1258, node 7 TClient is connected to server localhost:13255 2024-11-21T23:10:12.034233Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:12.034308Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:12.034353Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:12.034617Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-7" reason: "YELLOW-9a33-e9e2-8" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 7 host: "::1" port: 12001 } 2024-11-21T23:10:17.968978Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:17.969381Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:17.969544Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c47/r3tmp/tmp3ydJqz/pdisk_1.dat 2024-11-21T23:10:18.370822Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25524, node 9 TClient is connected to server localhost:25709 2024-11-21T23:10:18.945353Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:18.945460Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:18.945519Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:18.946079Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> YdbProxy::DescribeTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2024-11-21T23:10:11.112205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873827017002284:2138];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:11.126432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001ded/r3tmp/tmpGqH4LM/pdisk_1.dat 2024-11-21T23:10:11.743630Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:11.793551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:11.793634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:11.800014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61083 TServer::EnableGrpc on GrpcPort 23251, node 1 2024-11-21T23:10:12.323254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:12.323271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:12.323276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:12.323356Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61083 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:12.936337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:12.969863Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:16.517959Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873847937729217:2183];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:16.518006Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001ded/r3tmp/tmp52DgHB/pdisk_1.dat 2024-11-21T23:10:16.836264Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:16.857620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:16.857696Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:16.873038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14994 TServer::EnableGrpc on GrpcPort 25020, node 2 2024-11-21T23:10:17.468809Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:17.468831Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:17.468837Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:17.468913Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14994 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:17.881833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:17.888063Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 3502236172738816689 2024-11-21T23:10:22.207534Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.207747Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.207829Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.207891Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.207951Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.208025Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.208097Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.208167Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.209231Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.209343Z 2 00h00m30.010512s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.209404Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.209455Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.209509Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.209562Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.209609Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.209673Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.209756Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.209819Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.209863Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.209910Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.209952Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.210007Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.210060Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.210100Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2024-11-21T23:10:22.211969Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.212050Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.212094Z 7 00h00m30.010512s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.212147Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.212223Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.212272Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.212330Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2024-11-21T23:10:22.212381Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> YdbProxy::CreateCdcStream [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup >> YdbProxy::DropTopic [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> DataShardSnapshots::LockedWriteBulkUpsertConflict [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic >> YdbProxy::StaticCreds [GOOD] >> YdbProxy::DescribeTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001d29/r3tmp/tmp7c99ns/pdisk_1.dat 2024-11-21T23:10:13.251529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:13.473222Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:13.479899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:13.480008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:13.491093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6502 TServer::EnableGrpc on GrpcPort 64200, node 1 2024-11-21T23:10:13.987091Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:13.987119Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:13.987128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:13.987230Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6502 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:14.580780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:14.593428Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:17.438001Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873850016449734:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001d29/r3tmp/tmpqvQOuU/pdisk_1.dat 2024-11-21T23:10:17.542771Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:17.678476Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:17.689114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:17.689194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:17.691089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12270 TServer::EnableGrpc on GrpcPort 12974, node 2 2024-11-21T23:10:18.224950Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:18.224985Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:18.224993Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:18.225098Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12270 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:18.807201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:18.819175Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:22.005287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:10:22.410857Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873850016449734:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:22.410952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> DataShardSnapshots::MvccSnapshotAndSplit |81.9%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T23:07:48.625171Z :WriteRAW INFO: Random seed for debugging is 1732230468625130 2024-11-21T23:07:49.403481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873214885105618:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:49.403535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:49.550934Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873217730694520:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:49.555650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:50.071270Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:50.085066Z 
node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00135b/r3tmp/tmpJhuwV1/pdisk_1.dat 2024-11-21T23:07:50.455141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:50.634562Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:50.821607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:50.821713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:50.836495Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:07:50.837232Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:50.837423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:50.907403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:50.907468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:50.922635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4439, node 1 2024-11-21T23:07:51.210512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/00135b/r3tmp/yandexy3zdFQ.tmp 2024-11-21T23:07:51.210573Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/00135b/r3tmp/yandexy3zdFQ.tmp 2024-11-21T23:07:51.215241Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/00135b/r3tmp/yandexy3zdFQ.tmp 2024-11-21T23:07:51.230700Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:07:51.385406Z INFO: TTestServer started on Port 17263 GrpcPort 4439 TClient is connected to server localhost:17263 PQClient connected to localhost:4439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:07:51.901007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:07:54.403966Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873214885105618:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:54.404036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:54.501138Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873217730694520:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:54.501210Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:55.275958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873243500498451:2283], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:55.276089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:55.282521Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873243500498478:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:55.302469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T23:07:55.354237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873243500498480:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T23:07:55.800991Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873243500498515:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:55.802705Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmI1YWQ3NDMtNDNhNDRhNDQtYzIxZjI5NDQtN2IxOWMyODE=, ActorId: [2:7439873243500498449:2282], ActorState: ExecuteState, TraceId: 01jd8fpvg93nfqm0k02v7chb8a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:55.804940Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:55.817542Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873240654910278:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:55.819082Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjVlMWU3NzQtMjU0YjA0MjQtZmQyNjFhMDktZmQyNWZiMzc=, ActorId: [1:7439873240654910227:2304], ActorState: ExecuteState, TraceId: 01jd8fpvmpdekhhbmp6k764d9j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:55.819984Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:55.820990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:07:56.036110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:07:56.394593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:4439", true, true, 1000); 2024-11-21T23:07:57.023296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8fpwtx8b4z10nhwr0nta5x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZhZmE5MTktYjgwNjg2MzAtZWZmYmRkOTItNWUyNGMzODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439873249244845342:3030] === CheckClustersList. Ok 2024-11-21T23:08:03.090348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:4439 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:08:03.534417Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:4439 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 ... 
ed no session 2024-11-21T23:10:18.758260Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873853622511463:2474] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T23:10:18.758313Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873853622511463:2474] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T23:10:18.758339Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873853622511463:2474] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:10:18.758386Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:10:18.761093Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2024-11-21T23:10:18.777263Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:18.782511Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7439873853622511545:2474], now have 1 active actors on pipe 2024-11-21T23:10:18.782639Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:10:18.782679Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:10:18.783008Z node 16 :PERSQUEUE INFO: new Cookie src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:10:18.785371Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0 2024-11-21T23:10:18.783151Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:10:18.783219Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:10:18.784462Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:10:18.784521Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:10:18.784652Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:10:18.806501Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732230618800 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:10:18.806702Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:10:18.809480Z :INFO: [] MessageGroupId [src] SessionId [src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0] Write session: close. 
Timeout = 0 ms 2024-11-21T23:10:18.809534Z :INFO: [] MessageGroupId [src] SessionId [src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0] Write session will now close 2024-11-21T23:10:18.809607Z :DEBUG: [] MessageGroupId [src] SessionId [src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0] Write session: aborting 2024-11-21T23:10:18.810304Z :INFO: [] MessageGroupId [src] SessionId [src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:10:18.810348Z :DEBUG: [] MessageGroupId [src] SessionId [src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0] Write session: destroy 2024-11-21T23:10:19.170505Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0 grpc read done: success: 0 data: 2024-11-21T23:10:19.170543Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0 grpc read failed 2024-11-21T23:10:19.170574Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0 grpc closed 2024-11-21T23:10:19.170597Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|978eb3fe-c8c8d97a-4ccb2fe6-5eecd970_0 is DEAD 2024-11-21T23:10:19.171319Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:10:19.259058Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:10:19.259128Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7439873853622511545:2474] destroyed 2024-11-21T23:10:19.259307Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:10:19.279223Z :INFO: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Starting read session 2024-11-21T23:10:19.279284Z :DEBUG: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Starting cluster discovery 2024-11-21T23:10:19.279542Z :INFO: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24551: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24551
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24551. " 2024-11-21T23:10:19.279594Z :DEBUG: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Restart cluster discovery in 0.008931s 2024-11-21T23:10:19.292093Z :DEBUG: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Starting cluster discovery 2024-11-21T23:10:19.292544Z :INFO: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24551: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24551
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24551. " 2024-11-21T23:10:19.292599Z :DEBUG: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Restart cluster discovery in 0.012629s 2024-11-21T23:10:19.314519Z :DEBUG: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Starting cluster discovery 2024-11-21T23:10:19.314749Z :INFO: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24551: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24551
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24551. " 2024-11-21T23:10:19.314800Z :DEBUG: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Restart cluster discovery in 0.035411s 2024-11-21T23:10:19.350677Z :DEBUG: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Starting cluster discovery 2024-11-21T23:10:19.351036Z :NOTICE: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24551: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24551
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24551. " } 2024-11-21T23:10:19.351255Z :NOTICE: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24551: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24551
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24551. " } 2024-11-21T23:10:19.351434Z :INFO: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Closing read session. Close timeout: 0.000000s 2024-11-21T23:10:19.351557Z :NOTICE: [/Root] [/Root] [e5099d97-307d468b-f34f137d-a00fd6f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:10:20.376711Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7439873862212446226:2499], TxId: 281474976715689, task: 1, CA Id [15:7439873862212446224:2499]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T23:10:20.421528Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7439873862212446226:2499], TxId: 281474976715689, task: 1, CA Id [15:7439873862212446224:2499]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:10:20.460565Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7439873862212446226:2499], TxId: 281474976715689, task: 1, CA Id [15:7439873862212446224:2499]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:10:20.508550Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7439873862212446226:2499], TxId: 281474976715689, task: 1, CA Id [15:7439873862212446224:2499]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:10:20.595822Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7439873862212446226:2499], TxId: 281474976715689, task: 1, CA Id [15:7439873862212446224:2499]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:10:20.628286Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715690. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:20.628416Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439873862212446236:2492] TxId: 281474976715690. Ctx: { TraceId: 01jd8fv8fabb8bgprv2nv7j7k3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YzA0ZDliNDktN2ZlYzIxLWI2NmQ4NWIzLWFlNDBjYzFi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:20.628817Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=YzA0ZDliNDktN2ZlYzIxLWI2NmQ4NWIzLWFlNDBjYzFi, ActorId: [15:7439873857917478898:2492], ActorState: ExecuteState, TraceId: 01jd8fv8fabb8bgprv2nv7j7k3, Create QueryResponse for error on request, msg: 2024-11-21T23:10:20.630408Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fv9857zp9q36n5gzjvee6" } } YdbStatus: UNAVAILABLE ConsumedRu: 514 } 2024-11-21T23:10:20.734554Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7439873862212446226:2499], TxId: 281474976715689, task: 1, CA Id [15:7439873862212446224:2499]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:10:20.949102Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7439873862212446226:2499], TxId: 281474976715689, task: 1, CA Id [15:7439873862212446224:2499]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:10:21.414265Z node 15 :KQP_COMPUTE WARN: SelfId: [15:7439873862212446226:2499], TxId: 281474976715689, task: 1, CA Id [15:7439873862212446224:2499]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> Donor::ConsistentWritesWhenSwitchingToDonorMode >> Donor::MultipleEvicts ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [FAIL] Test command err: 2024-11-21T23:09:57.373331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873765897314175:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:57.373376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001202/r3tmp/tmpJb4dYJ/pdisk_1.dat 2024-11-21T23:09:58.160778Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:58.171825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:58.171902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:58.199760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14906, node 1 2024-11-21T23:09:58.766595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:58.767895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:58.767909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:58.768040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3558 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:59.215916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.222191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:59.222303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.243289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:59.243558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:59.243590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:09:59.249312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:59.249345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:09:59.255521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.256599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:59.267686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230599309, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:59.267731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:59.272285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:59.282117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:59.282284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:59.282340Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:59.282449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:59.282482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:59.282534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:59.288800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:59.288883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:59.288915Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:59.289020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:10:02.377979Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873765897314175:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:02.378050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp:304, virtual void NTestSuiteYdbSdkSessionsPool::TTestCaseStressTestSync1::Execute_(NUnitTest::TTestContext &): (client.GetCurrentPoolSize() == activeSessionsLimit) failed: (0 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x17B77740) NTestSuiteYdbSdkSessionsPool::TTestCaseStressTestSync1::Execute_(NUnitTest::TTestContext&)+5023 (0x172E90FF) std::__y1::__function::__func, void 
()>::operator()()+280 (0x17301548) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x17BB6659) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x17B7E2A9) NTestSuiteYdbSdkSessionsPool::TCurrentTest::Execute()+1204 (0x173002F4) NUnitTest::TTestFactory::Execute()+2438 (0x17B7FB76) NUnitTest::RunMain(int, char**)+5149 (0x17BB029D) ??+0 (0x7F2C240F9D90) __libc_start_main+128 (0x7F2C240F9E40) _start+41 (0x153CE029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2024-11-21T23:10:13.916128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873834787445250:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:13.916169Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001d22/r3tmp/tmp7fwWfV/pdisk_1.dat 2024-11-21T23:10:14.656193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:14.656319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:14.661248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:14.714499Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32610 TServer::EnableGrpc on GrpcPort 13172, node 1 2024-11-21T23:10:15.214333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:15.214365Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:15.214377Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:15.214548Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:15.763077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:15.786290Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:15.914543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:19.211985Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873861985156010:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:19.212041Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001d22/r3tmp/tmpIy7qbz/pdisk_1.dat 2024-11-21T23:10:19.468915Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:19.531527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:19.531601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:19.532918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19732 TServer::EnableGrpc on GrpcPort 15863, node 2 2024-11-21T23:10:20.082987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:20.083007Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:20.083018Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:20.083117Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:20.643824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:20.650009Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:10:20.807153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:10:20.832972Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2024-11-21T23:10:20.833004Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> YdbProxy::OAuthToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2024-11-21T23:10:13.815440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873834611549458:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:13.822831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001d0d/r3tmp/tmpIYF898/pdisk_1.dat 2024-11-21T23:10:14.323496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:14.323617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:14.324926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:14.344056Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10551 TServer::EnableGrpc on GrpcPort 6912, node 1 2024-11-21T23:10:14.703438Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:14.703461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:14.703468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:14.703580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10551 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:10:15.280338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:15.312538Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:17.686064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:10:18.827492Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873856966565683:2147];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:18.827573Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001d0d/r3tmp/tmpKaO4aH/pdisk_1.dat 2024-11-21T23:10:19.202798Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:19.230166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:19.234632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:19.243741Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28681 TServer::EnableGrpc on GrpcPort 28919, node 2 2024-11-21T23:10:19.642406Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:19.642429Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:19.642437Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:19.642556Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28681 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:20.094370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:22.663690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2024-11-21T23:10:14.626836Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873837493338543:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:14.626872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bdb/r3tmp/tmp1qYuoT/pdisk_1.dat 2024-11-21T23:10:15.402968Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:15.405513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:15.405598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:15.409882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22902 TServer::EnableGrpc on GrpcPort 22617, node 1 2024-11-21T23:10:15.745454Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:15.745477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:15.745484Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:15.745581Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22902 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:16.288132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:16.310539Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:16.402072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:10:19.101580Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873861138902981:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:19.102171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bdb/r3tmp/tmpmItB7T/pdisk_1.dat 2024-11-21T23:10:19.420369Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:19.439265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:19.439348Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:19.440525Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7183 TServer::EnableGrpc on GrpcPort 14290, node 2 2024-11-21T23:10:19.810990Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:19.811015Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:19.811024Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:19.811111Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:20.199350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:20.207313Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:10:20.239591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732230620246 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 ... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732230620246 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 ... 
(TRUNCATED) >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2024-11-21T23:10:14.092373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873839962088382:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:14.092739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001cf9/r3tmp/tmpx9Xno7/pdisk_1.dat 2024-11-21T23:10:14.612015Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:14.621852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:14.621939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:14.624268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10118 TServer::EnableGrpc on GrpcPort 17903, node 1 2024-11-21T23:10:14.978004Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:14.978031Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:14.978038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:14.978149Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:15.575991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:18.028277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:10:18.229531Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T23:10:19.561939Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873861261109173:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001cf9/r3tmp/tmpspunGL/pdisk_1.dat 2024-11-21T23:10:19.783279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:19.899559Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:19.921173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:19.921265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:19.925958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29641 TServer::EnableGrpc on GrpcPort 11355, node 2 2024-11-21T23:10:20.402142Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:20.402166Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:20.402173Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:20.402265Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T23:10:20.936782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> THealthCheckTest::NoBscResponse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2024-11-21T23:10:15.169905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873843624491181:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:15.170152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bc3/r3tmp/tmpIfTrLk/pdisk_1.dat 2024-11-21T23:10:15.904496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:15.904599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:15.927871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:15.928787Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7276 TServer::EnableGrpc on GrpcPort 20004, node 1 2024-11-21T23:10:16.514839Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:16.514864Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:16.514871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:16.514960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7276 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:17.287768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:17.303140Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:17.358862Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:20.575257Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873865325350011:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:20.575302Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bc3/r3tmp/tmpaDEKZm/pdisk_1.dat 2024-11-21T23:10:20.890517Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:20.920940Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:20.921034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:20.930568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27001 TServer::EnableGrpc on GrpcPort 23246, node 2 2024-11-21T23:10:21.291012Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:21.291035Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:21.291041Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:21.291131Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:21.702161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:21.711364Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2024-11-21T23:09:38.282772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:09:38.304484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:38.305318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:38.305571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:38.308441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:38.308539Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c35/r3tmp/tmpuSAAHe/pdisk_1.dat 2024-11-21T23:09:38.769894Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22216, node 1 TClient is connected to server localhost:18877 2024-11-21T23:09:39.317243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:39.317309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:39.317361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:39.317717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:48.166439Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:09:48.194057Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:48.194466Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:48.194868Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:48.195815Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:48.195980Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c35/r3tmp/tmpxVJYyO/pdisk_1.dat 2024-11-21T23:09:48.534095Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28738, node 3 TClient is connected to server localhost:9980 2024-11-21T23:09:48.877468Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:48.877524Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:48.877565Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:48.877950Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:57.709655Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:57.710096Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:57.710272Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:57.711397Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:684:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:57.711865Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:57.711934Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c35/r3tmp/tmp9fgrxU/pdisk_1.dat 2024-11-21T23:09:58.061423Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32730, node 5 TClient is connected to server localhost:21211 2024-11-21T23:10:01.887009Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:01.887085Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:01.887179Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:01.887959Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:01.907855Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:01.908054Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:01.976374Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2024-11-21T23:10:01.977136Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-5" reason: "YELLOW-9a33-e9e2-6" reason: "YELLOW-9a33-e9e2-7" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2024-11-21T23:10:12.695016Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:339:2187], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:12.695592Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:12.695789Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:12.696909Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:751:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:12.697543Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:12.697651Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c35/r3tmp/tmpBUA748/pdisk_1.dat 2024-11-21T23:10:13.059893Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8492, node 8 TClient is connected to server localhost:4263 2024-11-21T23:10:17.822313Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:17.822486Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:17.822577Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:17.826166Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:17.836798Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1223:2663]) [8:1479:2667] 2024-11-21T23:10:17.837211Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2024-11-21T23:10:17.864784Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2024-11-21T23:10:17.864939Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T23:10:17.865259Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2024-11-21T23:10:17.865366Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T23:10:17.865629Z node 8 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2024-11-21T23:10:17.865722Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T23:10:17.865904Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2024-11-21T23:10:17.867893Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T23:10:17.879134Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([10:1449:2333]) ... 
y 1.000002048 2024-11-21T23:10:17.956837Z node 8 :HIVE DEBUG: HIVE#72057594037968897 [FBN] Finding best node for tablet PersQueue.72075186224037888.Leader.0 2024-11-21T23:10:17.956897Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 family {PersQueue.72075186224037888.Leader.0 Booting} 2024-11-21T23:10:17.956956Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 node 8 is not alive 2024-11-21T23:10:17.957064Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected usage 0.000003877 of node 10 2024-11-21T23:10:17.957118Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected nodes count 1 2024-11-21T23:10:17.957181Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2024-11-21T23:10:17.957256Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 10 2024-11-21T23:10:17.957344Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 10) 2024-11-21T23:10:17.957420Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2024-11-21T23:10:17.957573Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2024-11-21T23:10:17.957684Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2024-11-21T23:10:17.957789Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T23:10:17.957931Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2024-11-21T23:10:17.958205Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 10 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.064024Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:10:17.983928Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2024-11-21T23:10:17.984046Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T23:10:17.984150Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [10:1448:2333]} 2024-11-21T23:10:17.986660Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 10 Cookie 72075186224037888 2024-11-21T23:10:18.189028Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2024-11-21T23:10:18.189226Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [10:1448:2333] 2024-11-21T23:10:18.189337Z node 8 
:HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 10) 2024-11-21T23:10:18.189419Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2024-11-21T23:10:18.189594Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2024-11-21T23:10:18.189719Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2024-11-21T23:10:18.189810Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2024-11-21T23:10:18.189940Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2024-11-21T23:10:18.190059Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2024-11-21T23:10:18.190228Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:10:18.190288Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2024-11-21T23:10:18.190779Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2024-11-21T23:10:18.190930Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T23:10:18.191027Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T23:10:18.191119Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T23:10:18.231299Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [8:1222:2662]} 2024-11-21T23:10:18.231444Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T23:10:22.732639Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 10: Status: 2 2024-11-21T23:10:22.732774Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Execute 2024-11-21T23:10:22.732838Z node 8 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T23:10:22.732961Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2024-11-21T23:10:22.733272Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2024-11-21T23:10:22.733412Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 10) 2024-11-21T23:10:22.733479Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2024-11-21T23:10:22.733612Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2024-11-21T23:10:22.733731Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2024-11-21T23:10:22.733812Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 10 2024-11-21T23:10:22.733890Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2024-11-21T23:10:22.733940Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T23:10:22.733983Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2024-11-21T23:10:22.734478Z node 8 :HIVE DEBUG: HIVE#72057594037968897 
THive::TTxKillNode(10)::Execute 2024-11-21T23:10:22.734600Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:10:22.734652Z node 8 :HIVE TRACE: Node(10) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2024-11-21T23:10:22.734715Z node 8 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 10) 2024-11-21T23:10:22.734774Z node 8 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [8:1519:2672] 2024-11-21T23:10:22.734837Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(10): waiting 3600.000000s 2024-11-21T23:10:22.737044Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([10:1449:2333]) [8:1519:2672] 2024-11-21T23:10:22.740696Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1890:2691]) [8:1891:2696] 2024-11-21T23:10:22.746913Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([8:1890:2691]) [8:1891:2696] 2024-11-21T23:10:22.750060Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([11:1863:2333]) [8:1927:2699] 2024-11-21T23:10:22.761253Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [11:1862:2333] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T23:10:22.761398Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(11)::Execute 2024-11-21T23:10:22.761544Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:22.761587Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T23:10:22.761631Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T23:10:22.761673Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T23:10:22.761711Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2024-11-21T23:10:22.761807Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:22.772377Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 11 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-9a33-f489" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-9a33-6fa7" reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "RED-9a33-6fa7" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-9a33-e5e3-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: 
"YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-10" reason: "YELLOW-9a33-e9e2-11" reason: "YELLOW-9a33-e9e2-8" reason: "YELLOW-9a33-e9e2-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-9a33-e5e3-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 8 host: "::1" port: 12001 } >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks >> TKeyValueTest::TestWriteReadPatchRead |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |81.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> Donor::MultipleEvicts [GOOD] >> BasicUsage::BrokenCredentialsProvider [GOOD] >> YdbProxy::ReadNonExistentTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2024-11-21T23:10:27.448975Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T23:10:27.449070Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1732230627448 ErrorReason# 2024-11-21T23:10:27.458531Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T23:10:27.458636Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. 
KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1732230627458 ErrorReason# 2024-11-21T23:10:27.464864Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T23:10:27.464955Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1732230627464 ErrorReason# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoBscResponse [GOOD] Test command err: 2024-11-21T23:09:36.520774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:09:36.533318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:36.533842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:36.534019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:36.536146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:36.536205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c81/r3tmp/tmpjjUug0/pdisk_1.dat 2024-11-21T23:09:37.054741Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9151, node 1 TClient is connected to server localhost:31765 2024-11-21T23:09:37.827841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:37.827905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:37.827956Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:37.828343Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:46.094326Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:430:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:09:46.109785Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:433:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:46.110121Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:46.110584Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:46.111854Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:46.112079Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c81/r3tmp/tmpV0nVcj/pdisk_1.dat 2024-11-21T23:09:46.452260Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63516, node 3 TClient is connected to server localhost:19934 2024-11-21T23:09:46.851969Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:46.852042Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:46.852111Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:46.852626Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:54.907219Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:567:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:54.907784Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:54.908028Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:54.908741Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:565:2259], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:09:54.909054Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:09:54.909306Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c81/r3tmp/tmpbdGFU8/pdisk_1.dat 2024-11-21T23:09:55.407469Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8690, node 5 TClient is connected to server localhost:24243 2024-11-21T23:09:55.927225Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:55.927299Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:55.927340Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:55.927825Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:05.679040Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:632:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:05.679541Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:05.679672Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:05.680476Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:630:2324], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:05.680956Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:05.681085Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c81/r3tmp/tmpxuphaJ/pdisk_1.dat 2024-11-21T23:10:06.209794Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5884, node 7 TClient is connected to server localhost:18259 2024-11-21T23:10:07.081749Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:07.081938Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:07.081998Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:07.082271Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:13.667242Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:13.667559Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:13.667700Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c81/r3tmp/tmpzwqzvA/pdisk_1.dat 2024-11-21T23:10:14.087901Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61661, node 9 TClient is connected to server localhost:27617 2024-11-21T23:10:14.967596Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:14.967668Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:14.967715Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:14.968354Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:15.059150Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:15.059368Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:15.084173Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected >> TxUsage::WriteToTopic_Demo_1 >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 14355990006049190258 0 donors: 2024-11-21T23:10:26.727204Z 15 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:26.729276Z 15 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:26.755748Z 15 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 13:1000 2024-11-21T23:10:26.846429Z 13 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:26.848665Z 13 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:26.870703Z 13 00h00m20.012048s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 15:1000 2024-11-21T23:10:26.936955Z 15 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:26.939232Z 15 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:26.951003Z 15 00h00m20.013072s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 13:1000 
2024-11-21T23:10:27.013423Z 13 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:27.015457Z 13 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:27.027134Z 13 00h00m20.014096s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 15:1000 2024-11-21T23:10:27.093873Z 15 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:27.095912Z 15 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:27.108202Z 15 00h00m20.015120s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 13:1000 2024-11-21T23:10:27.174122Z 13 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:27.176244Z 13 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:27.188033Z 13 00h00m20.016144s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 15:1000 2024-11-21T23:10:27.290585Z 15 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:27.292754Z 15 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:27.311738Z 15 00h00m20.017168s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 13:1000 2024-11-21T23:10:27.397860Z 13 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:27.400123Z 13 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:27.416610Z 13 00h00m20.018192s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 15:1000 2024-11-21T23:10:27.483889Z 15 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:27.485896Z 15 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12458581609588385654] 2024-11-21T23:10:27.498670Z 15 00h00m20.019216s :BS_SYNCER ERROR: VDISK[82000000:_:1:1:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 13:1000 >> TxUsage::WriteToTopic_Demo_2 >> Trace::SkipSpaces [GOOD] >> Trace::NextToken [GOOD] >> Trace::TTraceEvent [GOOD] >> Trace::TExpectedTraceEvent [GOOD] >> Trace::TExpectedTrace [GOOD] >> TSettingsValidation::ValidateSettingsFailOnStart >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TPersQueueTest::LOGBROKER_7820 [GOOD] >> TPersQueueTest::InflightLimit >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> 
BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> LocalPartition::Restarts >> BSCRestartPDisk::RestartNotAllowed [GOOD] >> BasicUsage::WriteRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2024-11-21T23:10:13.730034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873835683716313:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:13.732526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001cc5/r3tmp/tmpWew55d/pdisk_1.dat 2024-11-21T23:10:14.186039Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:14.192982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:14.193090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:14.196539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13212 TServer::EnableGrpc on GrpcPort 32480, node 1 2024-11-21T23:10:14.519467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:14.519491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:14.519498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:14.519617Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:15.020015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:15.293512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2024-11-21T23:10:17.276923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873852863586346:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:17.277063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:17.281328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873852863586361:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:17.281749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873852863586362:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:17.290418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2024-11-21T23:10:17.305787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873852863586369:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T23:10:17.305842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873852863586368:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T23:10:18.503323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2024-11-21T23:10:18.784389Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873835683716313:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:18.784665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:10:19.098015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:10:19.854317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:10:20.704383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:10:21.345174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T23:10:24.130376Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873881210518301:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:24.130439Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001cc5/r3tmp/tmpotjnfu/pdisk_1.dat 2024-11-21T23:10:24.310819Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:24.333089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:24.333172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:24.334911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20892 TServer::EnableGrpc on GrpcPort 26949, node 2 2024-11-21T23:10:24.662961Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:24.662984Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:24.662993Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:24.663088Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20892 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:24.965631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:24.979244Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> KqpErrors::ResolveTableError [GOOD] >> TxUsage::WriteToTopic_Demo_4 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2024-11-21T23:09:35.074071Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1732230575074031 2024-11-21T23:09:35.498985Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873671597682118:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:35.499033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:35.583498Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873672581392521:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:35.600477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:35.823308Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:09:35.838017Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029d9/r3tmp/tmpt7Lza1/pdisk_1.dat 2024-11-21T23:09:36.297012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:36.297199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:36.303354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:36.303453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:36.309021Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:09:36.313604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2024-11-21T23:09:36.314940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:36.380183Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25667, node 1 2024-11-21T23:09:36.430382Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:09:36.444117Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:09:36.573092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0029d9/r3tmp/yandexl01oT2.tmp 2024-11-21T23:09:36.573116Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0029d9/r3tmp/yandexl01oT2.tmp 2024-11-21T23:09:36.573259Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0029d9/r3tmp/yandexl01oT2.tmp 2024-11-21T23:09:36.573354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:36.667535Z INFO: TTestServer started on Port 26857 GrpcPort 25667 TClient is connected to server localhost:26857 PQClient connected to localhost:25667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:36.996268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:09:39.939017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873688777552297:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:39.939168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873688777552309:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:39.939244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:39.944488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:09:40.076918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873688777552312:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:09:40.499720Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873694056229183:2287], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:40.501339Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWYyYjlkOWItNDRkYjQxNDktYmU5ZGM2Y2QtNDdkNGJiMTk=, ActorId: [2:7439873694056229142:2280], ActorState: ExecuteState, TraceId: 01jd8ft21q4bk3hnbn201j5y41, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:40.505720Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:40.513275Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873693072519704:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:40.514838Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873671597682118:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:40.514961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:40.515172Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjY2NjViZi05NDJkZDU2YS0xMWMxMmU3Yy1kZTQwODEyMQ==, ActorId: [1:7439873688777552295:2303], ActorState: ExecuteState, TraceId: 01jd8ft1pf2jt82gakbgxvew0e, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:40.515799Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:40.519023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:09:40.586510Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873672581392521:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:40.586576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:40.761138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:09:40.967391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:25667", true, true, 1000); 2024-11-21T23:09:41.473985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8ft2wd03th7cttcp4wy6gx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjc3NGJjY2YtYTEwZTg5MTMtZTRlZWY2NDgtZTFhODcwZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439873697367487433:2999] === CheckClustersList. Ok 2024-11-21T23:09:47.931789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:25667 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:09:48.074744Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:25667 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: ... 22:2471] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T23:10:25.360228Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439873883977569873:2471] connected; active server actors: 1 2024-11-21T23:10:25.360514Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873879682602522:2471] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T23:10:25.360558Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873879682602522:2471] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T23:10:25.362483Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439873883977569873:2471] disconnected; active server actors: 1 2024-11-21T23:10:25.362507Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7439873883977569873:2471] disconnected no session 2024-11-21T23:10:25.639159Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:25.639229Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7439873883977569922:2471], now have 1 active actors on pipe 2024-11-21T23:10:25.627718Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873879682602522:2471] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T23:10:25.627767Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873879682602522:2471] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T23:10:25.627789Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7439873879682602522:2471] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:10:25.627823Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:10:25.649603Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2024-11-21T23:10:25.650953Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:10:25.651014Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:10:25.651329Z node 6 :PERSQUEUE INFO: new Cookie src|a0de0956-69af4eb6-8f35671e-93d99ad9_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:10:25.651464Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T23:10:25.651544Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:10:25.666835Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:10:25.666883Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:10:25.666997Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:10:25.669731Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732230625669 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:10:25.669850Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|a0de0956-69af4eb6-8f35671e-93d99ad9_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:10:25.668640Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|a0de0956-69af4eb6-8f35671e-93d99ad9_0 2024-11-21T23:10:25.670475Z :INFO: [] MessageGroupId [src] SessionId [src|a0de0956-69af4eb6-8f35671e-93d99ad9_0] Write session: close. Timeout = 0 ms 2024-11-21T23:10:25.670520Z :INFO: [] MessageGroupId [src] SessionId [src|a0de0956-69af4eb6-8f35671e-93d99ad9_0] Write session will now close 2024-11-21T23:10:25.670566Z :DEBUG: [] MessageGroupId [src] SessionId [src|a0de0956-69af4eb6-8f35671e-93d99ad9_0] Write session: aborting 2024-11-21T23:10:25.671050Z :INFO: [] MessageGroupId [src] SessionId [src|a0de0956-69af4eb6-8f35671e-93d99ad9_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:10:25.671085Z :DEBUG: [] MessageGroupId [src] SessionId [src|a0de0956-69af4eb6-8f35671e-93d99ad9_0] Write session: destroy 2024-11-21T23:10:25.674545Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|a0de0956-69af4eb6-8f35671e-93d99ad9_0 grpc read done: success: 0 data: 2024-11-21T23:10:25.674569Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a0de0956-69af4eb6-8f35671e-93d99ad9_0 grpc read failed 2024-11-21T23:10:25.675189Z node 5 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|a0de0956-69af4eb6-8f35671e-93d99ad9_0 2024-11-21T23:10:25.675216Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a0de0956-69af4eb6-8f35671e-93d99ad9_0 is DEAD 2024-11-21T23:10:25.675542Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:10:25.764286Z :INFO: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] Starting read session 2024-11-21T23:10:25.764350Z :DEBUG: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] Starting session to cluster null (localhost:61586) 2024-11-21T23:10:25.766185Z :DEBUG: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:25.766227Z :DEBUG: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:25.766266Z :DEBUG: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] [null] Reconnecting session to cluster null in 0.000000s 2024-11-21T23:10:25.795742Z :ERROR: [/Root] [/Root] 
[6ea80aa3-37333288-ffd4e5ac-52c649e9] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2024-11-21T23:10:25.795818Z :DEBUG: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:25.795862Z :DEBUG: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:10:25.796000Z :INFO: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2024-11-21T23:10:25.796216Z :NOTICE: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:10:25.796256Z :DEBUG: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2024-11-21T23:10:25.796366Z :INFO: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] Closing read session. Close timeout: 0.000000s 2024-11-21T23:10:25.796407Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:10:25.796447Z :INFO: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] Counters: { Errors: 1 CurrentSessionLifetimeMs: 32 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:10:25.796562Z :NOTICE: [/Root] [/Root] [6ea80aa3-37333288-ffd4e5ac-52c649e9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:10:25.807096Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:10:25.807163Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439873883977569922:2471] destroyed 2024-11-21T23:10:25.807307Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:10:26.179252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:10:26.179289Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:26.365048Z node 5 :KQP_EXECUTER ERROR: ActorId: [5:7439873888272537257:2483] TxId: 281474976710687. Ctx: { TraceId: 01jd8fve9n5y1e6fkc74hq0ds3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDM5MzQ3YjItNjFhZDczM2YtYzJjYmJiZWUtZDQ5MjNjOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 6 2024-11-21T23:10:26.365390Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7439873888272537263:2496], TxId: 281474976710687, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd8fve9n5y1e6fkc74hq0ds3. SessionId : ydb://session/3?node_id=5&id=NDM5MzQ3YjItNjFhZDczM2YtYzJjYmJiZWUtZDQ5MjNjOWY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [5:7439873888272537257:2483], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:10:26.365607Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7439873888272537265:2497], TxId: 281474976710687, task: 4. Ctx: { SessionId : ydb://session/3?node_id=5&id=NDM5MzQ3YjItNjFhZDczM2YtYzJjYmJiZWUtZDQ5MjNjOWY=. TraceId : 01jd8fve9n5y1e6fkc74hq0ds3. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [5:7439873888272537257:2483], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:10:26.838022Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710688. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:26.838189Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7439873888272537282:2498] TxId: 281474976710688. Ctx: { TraceId: 01jd8fvfaxc24b8vnw6d2v1apx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NTE1ZWM1MGQtN2FlZGVkMGYtZDhjOGM0MS05Y2U5MjA0Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:26.838637Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NTE1ZWM1MGQtN2FlZGVkMGYtZDhjOGM0MS05Y2U5MjA0Yg==, ActorId: [5:7439873888272537279:2498], ActorState: ExecuteState, TraceId: 01jd8fvfaxc24b8vnw6d2v1apx, Create QueryResponse for error on request, msg: 2024-11-21T23:10:26.842999Z node 5 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fvfaxc24b8vnw6ewg7414" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> DataShardSnapshots::LockedWriteDistributedCommitSuccess [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze >> YdbProxy::AlterTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 16212863701970184963 >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> TTxAllocatorClientTest::ZeroRange [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2024-11-21T23:10:24.756331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:24.757063Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0010f0/r3tmp/tmpxQU4aG/pdisk_1.dat 2024-11-21T23:10:25.196407Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:25.512016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:25.641207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:25.641353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:25.659466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:25.659614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:25.681762Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:10:25.682501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:25.682929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:26.144286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:28.029376Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T23:10:28.029457Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T23:10:28.029519Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:28.029575Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T23:10:28.029652Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T23:10:28.045779Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T23:10:28.056424Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 300.000000s, cancelAfter: (empty maybe) 2024-11-21T23:10:28.056517Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T23:10:28.056579Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:28.056637Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T23:10:28.056700Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T23:10:28.057402Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T23:10:28.057888Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 0. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Bootstrap done, become ReadyState 2024-11-21T23:10:28.057985Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Executing physical tx, type: 2, stages: 1 2024-11-21T23:10:28.058113Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2024-11-21T23:10:28.058220Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:28.058645Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2024-11-21T23:10:28.058852Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T23:10:28.058981Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T23:10:28.059289Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] will be executed on 1 shards. 2024-11-21T23:10:28.059435Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T23:10:28.059873Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. 
Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2024-11-21T23:10:28.059935Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T23:10:28.070504Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1476 RawX2: 4294970201 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\010\000\000\000\000\006\002\002\004\004\006\006" Rows: 3 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\001\000\000\000" KeyPoints: "\001\000\004\000\000\000\002\000\000\000" KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=" } RequestContext { key: "TraceId" value: "01jd8fvg7s4hggpc0evh9przxj" } EnableSpilling: false 2024-11-21T23:10:28.070834Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2024-11-21T23:10:28.070991Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T23:10:28.071143Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T23:10:28.071199Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2024-11-21T23:10:28.071262Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T23:10:28.071363Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T23:10:28.071430Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T23:10:28.120943Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T23:10:28.121138Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2024-11-21T23:10:28.121199Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1476:2905] TxId: 281474976715658. 
Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2024-11-21T23:10:28.121276Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1476:2905] TxId: 281474976715658. Ctx: { TraceId: 01jd8fvg7s4hggpc0evh9przxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjEwZTk4YWEtYzg4ZGI0YTItOGFmN2JiMDgtM2JmY2I2ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T23:10:28.175462Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1491:2924], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2024-11-21T23:10:28.177894Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGVmZDU5YzEtNDA3Y2FjZC1hZGFkMDJmZi01ZjIwZTllMw==, ActorId: [1:1489:2922], ActorState: ExecuteState, TraceId: 01jd8fvgrxdm47ryctvw4bcba1, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2024-11-21T23:10:14.241027Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873838533685397:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:14.241075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bef/r3tmp/tmpw9K50t/pdisk_1.dat 2024-11-21T23:10:14.971936Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:14.991128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:14.991223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:14.995701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15627 TServer::EnableGrpc on GrpcPort 17127, node 1 2024-11-21T23:10:15.670303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:15.670324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:15.670329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:15.670437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:16.375284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:16.398772Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:19.246671Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873838533685397:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:19.246771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:10:19.423706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:10:20.507077Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873863621087039:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:20.518294Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bef/r3tmp/tmptHKRyy/pdisk_1.dat 2024-11-21T23:10:20.854738Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:20.871281Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:20.871372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:20.872789Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26489 TServer::EnableGrpc on GrpcPort 25344, node 2 2024-11-21T23:10:21.163937Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:21.163960Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:21.163971Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:21.164063Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26489 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T23:10:21.603805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:21.612295Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:24.643878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:10:24.720016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:10:25.850039Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439873884358009136:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:25.850190Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001bef/r3tmp/tmpSmNhzS/pdisk_1.dat 2024-11-21T23:10:26.016498Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:26.046835Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:26.046944Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:26.049730Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23154 TServer::EnableGrpc on GrpcPort 23679, node 3 2024-11-21T23:10:26.379215Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:26.379239Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:26.379249Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:26.379358Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:10:26.745802Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:26.886810Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> DataShardSnapshots::LockedWriteDistributedCommitAborted [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T23:07:55.130801Z :WriteRAW INFO: Random seed for debugging is 1732230475130757 2024-11-21T23:07:55.818127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873239644178257:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:55.818165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:56.018377Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873246809005282:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:56.018433Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:56.535697Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:56.559076Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001026/r3tmp/tmpEcU5GG/pdisk_1.dat 2024-11-21T23:07:56.859601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:57.082779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:57.199539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:57.199638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:57.202642Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:07:57.222521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:57.287740Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:57.304134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:57.304224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc 
on GrpcPort 65347, node 1 2024-11-21T23:07:57.320129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:57.486992Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:57.518591Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:57.576196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001026/r3tmp/yandexE6xg11.tmp 2024-11-21T23:07:57.576222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001026/r3tmp/yandexE6xg11.tmp 2024-11-21T23:07:57.576359Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001026/r3tmp/yandexE6xg11.tmp 2024-11-21T23:07:57.576479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:07:57.825642Z INFO: TTestServer started on Port 22341 GrpcPort 65347 TClient is connected to server localhost:22341 PQClient connected to localhost:65347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:58.536521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:08:00.821510Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873239644178257:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:00.821621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:01.018582Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873246809005282:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:01.018643Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:03.236312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873274003917669:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:03.236508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:03.237062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873274003917689:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:03.249144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:08:03.341650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:08:03.343083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873274003917691:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:08:04.028479Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873276873776630:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:08:04.028377Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873274003917783:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:08:04.030929Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjEwODQ5ZjAtZTg0MjZiNzMtOTZkOWQzOTktZDZhZDI0MQ==, ActorId: [1:7439873274003917649:2306], ActorState: ExecuteState, TraceId: 01jd8fq37a10waxanz3xrqgsm9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:08:04.032678Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:08:04.036152Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2RjM2Q1ZGMtODY4ODhhNi1lMTA3OWMyZi00ZjE1NDNlYg==, ActorId: [2:7439873276873776581:2285], ActorState: ExecuteState, TraceId: 01jd8fq3pkf5ngmmqnk1f56h0s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:08:04.036685Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:08:04.037785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:08:04.341622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:08:04.631911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:65347", true, true, 1000); 2024-11-21T23:08:05.601515Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8fq4znavdtmdec1nq2px1z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM5OGViYTUtNDY5YzJlZDYtYTgwY2RjY2QtNWY4MmIyYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439873282593852880:3070] 2024-11-21T23:08:12.243354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:08:12.243397Z node 1 :IMPORT WARN: Table profiles were not loaded === CheckClustersList. Ok 2024-11-21T23:08:12.603939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:6 ... AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T23:10:25.889304Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873884685919273:2479] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T23:10:25.906231Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:10:25.906262Z node 15 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:25.908580Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873884685919273:2479] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T23:10:26.243370Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873884685919273:2479] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T23:10:26.247860Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439873888980886628:2479] connected; active server actors: 1 2024-11-21T23:10:26.247930Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873884685919273:2479] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T23:10:26.247967Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873884685919273:2479] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T23:10:26.248551Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439873888980886628:2479] disconnected; active server actors: 1 2024-11-21T23:10:26.248582Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7439873888980886628:2479] disconnected no session 2024-11-21T23:10:26.506465Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873884685919273:2479] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T23:10:26.506516Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873884685919273:2479] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T23:10:26.506541Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873884685919273:2479] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:10:26.506587Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:10:26.509206Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2024-11-21T23:10:26.509115Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:26.509186Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7439873888980886653:2479], now have 1 active actors on pipe 2024-11-21T23:10:26.510604Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:10:26.510663Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:10:26.518854Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|a0a22105-68ae95d3-1dd0ef62-54af8619_0 2024-11-21T23:10:26.521178Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732230626521 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:10:26.521334Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|a0a22105-68ae95d3-1dd0ef62-54af8619_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:10:26.521565Z :INFO: [] MessageGroupId [src] SessionId [src|a0a22105-68ae95d3-1dd0ef62-54af8619_0] Write session: close. Timeout = 0 ms 2024-11-21T23:10:26.521603Z :INFO: [] MessageGroupId [src] SessionId [src|a0a22105-68ae95d3-1dd0ef62-54af8619_0] Write session will now close 2024-11-21T23:10:26.521648Z :DEBUG: [] MessageGroupId [src] SessionId [src|a0a22105-68ae95d3-1dd0ef62-54af8619_0] Write session: aborting 2024-11-21T23:10:26.522123Z :INFO: [] MessageGroupId [src] SessionId [src|a0a22105-68ae95d3-1dd0ef62-54af8619_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:10:26.522166Z :DEBUG: [] MessageGroupId [src] SessionId [src|a0a22105-68ae95d3-1dd0ef62-54af8619_0] Write session: destroy 2024-11-21T23:10:26.523143Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|a0a22105-68ae95d3-1dd0ef62-54af8619_0 grpc read done: success: 0 data: 2024-11-21T23:10:26.523179Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a0a22105-68ae95d3-1dd0ef62-54af8619_0 grpc read failed 2024-11-21T23:10:26.523212Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a0a22105-68ae95d3-1dd0ef62-54af8619_0 grpc closed 2024-11-21T23:10:26.523252Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a0a22105-68ae95d3-1dd0ef62-54af8619_0 is DEAD 2024-11-21T23:10:26.524452Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:10:26.510863Z node 16 :PERSQUEUE INFO: new Cookie src|a0a22105-68ae95d3-1dd0ef62-54af8619_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:10:26.511050Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2024-11-21T23:10:26.511120Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:10:26.513108Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:10:26.513136Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:10:26.514488Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:10:26.529164Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:10:26.529213Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7439873888980886653:2479] destroyed 2024-11-21T23:10:26.529274Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:10:26.634466Z :INFO: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Starting read session 2024-11-21T23:10:26.634541Z :DEBUG: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Starting cluster discovery 2024-11-21T23:10:26.634831Z :INFO: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4834: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4834
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4834. " 2024-11-21T23:10:26.634894Z :DEBUG: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Restart cluster discovery in 0.009271s 2024-11-21T23:10:26.655074Z :DEBUG: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Starting cluster discovery 2024-11-21T23:10:26.658172Z :INFO: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4834: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4834
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4834. " 2024-11-21T23:10:26.664300Z :DEBUG: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Restart cluster discovery in 0.015303s 2024-11-21T23:10:26.682689Z :DEBUG: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Starting cluster discovery 2024-11-21T23:10:26.682895Z :INFO: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4834: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4834
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4834. " 2024-11-21T23:10:26.682940Z :DEBUG: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Restart cluster discovery in 0.027617s 2024-11-21T23:10:26.718498Z :DEBUG: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Starting cluster discovery 2024-11-21T23:10:26.729653Z :NOTICE: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4834: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4834
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4834. " } 2024-11-21T23:10:26.738562Z :NOTICE: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4834: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4834
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4834. " } 2024-11-21T23:10:26.738791Z :INFO: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Closing read session. Close timeout: 0.000000s 2024-11-21T23:10:26.738952Z :NOTICE: [/Root] [/Root] [f8d93609-e6dc795b-a94fc0c8-bef896fa] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:10:27.703190Z node 15 :KQP_EXECUTER ERROR: ActorId: [15:7439873893275854020:2497] TxId: 281474976710687. Ctx: { TraceId: 01jd8fvfrj5skxrqsm84mpaw6z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MmIzN2Q4M2EtZjk5YjViODAtYmQ2MGIwZDgtZGI5OGY5YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 16 2024-11-21T23:10:27.703644Z node 15 :KQP_COMPUTE ERROR: SelfId: [15:7439873893275854032:2507], TxId: 281474976710687, task: 2. Ctx: { SessionId : ydb://session/3?node_id=15&id=MmIzN2Q4M2EtZjk5YjViODAtYmQ2MGIwZDgtZGI5OGY5YQ==. TraceId : 01jd8fvfrj5skxrqsm84mpaw6z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [15:7439873893275854020:2497], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:10:27.703700Z node 15 :KQP_COMPUTE ERROR: SelfId: [15:7439873893275854033:2508], TxId: 281474976710687, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01jd8fvfrj5skxrqsm84mpaw6z. SessionId : ydb://session/3?node_id=15&id=MmIzN2Q4M2EtZjk5YjViODAtYmQ2MGIwZDgtZGI5OGY5YQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [15:7439873893275854020:2497], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2024-11-21T23:08:43.695672Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:08:43.696134Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:08:43.696913Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:08:43.698502Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.698966Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:08:43.708749Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.708862Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.708920Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.708999Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:08:43.709179Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.709299Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:08:43.709432Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:08:43.710101Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#5000 2024-11-21T23:08:43.710647Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.710706Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:08:43.710792Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2024-11-21T23:08:43.710831Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 5000 |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> KqpErrors::ProposeResultLost_RwTx [GOOD] |82.0%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.0%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TPersQueueTest::DisableWrongSettings [GOOD] >> TPersQueueTest::DisableDeduplication >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> KqpErrors::ProposeError [GOOD] >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx [GOOD] Test command err: 2024-11-21T23:10:26.512651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:10:26.532897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:26.533713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:26.533975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:26.538551Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:26.538652Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0010e1/r3tmp/tmp5062ZJ/pdisk_1.dat 2024-11-21T23:10:26.980099Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:27.210242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:27.357362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:27.357564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:27.364862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:27.364971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:27.382986Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:10:27.383538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:27.383909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:27.770697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:28.893021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1515:2919], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:28.893186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1526:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:28.893606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:28.899786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:10:29.636830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1529:2927], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:10:30.111257Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T23:10:30.111358Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T23:10:30.111426Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:30.111517Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T23:10:30.111590Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T23:10:30.114466Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T23:10:30.124674Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.435497s, cancelAfter: (empty maybe) 2024-11-21T23:10:30.124764Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2024-11-21T23:10:30.124830Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:30.124902Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T23:10:30.124963Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T23:10:30.125616Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2024-11-21T23:10:30.126105Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1704:2917] TxId: 0. 
Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T23:10:30.126201Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2024-11-21T23:10:30.126311Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T23:10:30.126419Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:30.126712Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2024-11-21T23:10:30.126888Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T23:10:30.126992Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T23:10:30.127308Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2024-11-21T23:10:30.127452Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T23:10:30.127892Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:30.127950Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhgtegahnbqgcv8jqac6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWRhMmI4OWYtOTliNjQwNzMtZTdhYmEzNWItMTgwNmVhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T23:10:3 ... : { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T23:10:30.291178Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Preparing 2024-11-21T23:10:30.291237Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037889 not finished yet: Preparing 2024-11-21T23:10:30.291268Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037890 not finished yet: Preparing 2024-11-21T23:10:30.291291Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037891 not finished yet: Preparing 2024-11-21T23:10:30.291347Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, waiting for 0 compute actor(s) and 4 datashard(s): DS 72075186224037888 (Preparing), DS 72075186224037889 (Preparing), DS 72075186224037890 (Preparing), DS 72075186224037891 (Preparing), 2024-11-21T23:10:30.291406Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, not immediate tx, become PrepareState 2024-11-21T23:10:30.295787Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shard 72075186224037889 propose error, notDelivered: 0, notPrepared: 0, wasRestart: 0 2024-11-21T23:10:30.295862Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037888 2024-11-21T23:10:30.295938Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037889 2024-11-21T23:10:30.295978Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037890 2024-11-21T23:10:30.296027Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send CancelTransactionProposal to shard: 72075186224037891 2024-11-21T23:10:30.305862Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2024-11-21T23:10:30.305944Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 2, does not have the CA id yet or is already complete 2024-11-21T23:10:30.305968Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. 
Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 3, does not have the CA id yet or is already complete 2024-11-21T23:10:30.305992Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 4, does not have the CA id yet or is already complete 2024-11-21T23:10:30.311976Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: UNDETERMINED Issues { message: "State of operation is unknown." issue_code: 2026 severity: 1 issues { message: "Tx state unknown for shard 72075186224037889, txid 281474976715661" issue_code: 200506 severity: 1 } } Result { Stats { } } , to ActorId: [1:1713:3033] 2024-11-21T23:10:30.312297Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2024-11-21T23:10:30.312428Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:10:30.312487Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1728:3033] TxId: 281474976715661. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T23:10:30.313884Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, ActorId: [1:1713:3033], ActorState: ExecuteState, TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Create QueryResponse for error on request, msg: 2024-11-21T23:10:30.316242Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1736:3033] TxId: 0. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T23:10:30.316594Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T23:10:30.316965Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Resolved key sets: 0 2024-11-21T23:10:30.317195Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:30.317248Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715662. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T23:10:30.317294Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T23:10:30.317327Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T23:10:30.317423Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:10:30.317516Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T23:10:30.317601Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1736:3033] TxId: 281474976715662. Ctx: { TraceId: 01jd8fvjrkdq6bjxejthc2x0ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3NWQzNWUtOTQ5MDhiZmQtNDM3YWI1YmItYjY4YWQ4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadOneMessage >> TxUsage::WriteToTopic_Demo_3 >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2024-11-21T23:10:08.384503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873813591291879:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:08.384632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001100/r3tmp/tmpEfRdgC/pdisk_1.dat 2024-11-21T23:10:08.845751Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:08.855801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:08.855920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:08.861312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20943, node 1 2024-11-21T23:10:08.998911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:08.998932Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:08.998943Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:08.999026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20216 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:09.329144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:09.335522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:09.335580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:09.339477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:10:09.339730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:10:09.339754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:10:09.342539Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:10:09.342570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 waiting... 
2024-11-21T23:10:09.344408Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:09.355402Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:09.356250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230609396, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:09.356301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:10:09.356576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:10:09.359548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:09.359764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:09.359816Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:10:09.359891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:10:09.359937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:10:09.359992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:10:09.363078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:10:09.363161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:10:09.363184Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:10:09.363287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:10:13.386521Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873813591291879:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:13.386612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:10:23.845029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:10:23.845089Z node 1 :IMPORT WARN: Table profiles were not loaded >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeError [GOOD] Test command err: 2024-11-21T23:10:26.661014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:431:2128], Scheduled retry for error: {
: Error: Scheme service not found } 2024-11-21T23:10:26.676991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:434:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:26.677748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:26.677998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:26.680600Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:10:26.680669Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0010ed/r3tmp/tmpJ8W46k/pdisk_1.dat 2024-11-21T23:10:27.175455Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:27.452372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:27.591050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:27.591211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:27.604273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:27.604460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:27.619081Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:10:27.619636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:27.620027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:28.030626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:29.108143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1515:2919], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:29.108292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1526:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:29.108674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:29.114745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:10:29.835812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1529:2927], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:10:30.277049Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2024-11-21T23:10:30.277131Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2024-11-21T23:10:30.277208Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:30.277280Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T23:10:30.277352Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T23:10:30.280408Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2024-11-21T23:10:30.289615Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.412290s, cancelAfter: (empty maybe) 2024-11-21T23:10:30.289688Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2024-11-21T23:10:30.289754Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:30.289823Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2024-11-21T23:10:30.289887Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2024-11-21T23:10:30.290508Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2024-11-21T23:10:30.290959Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1704:2917] TxId: 0. 
Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T23:10:30.291042Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2024-11-21T23:10:30.291134Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T23:10:30.291223Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2024-11-21T23:10:30.291482Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2024-11-21T23:10:30.291643Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T23:10:30.291751Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2024-11-21T23:10:30.292011Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2024-11-21T23:10:30.292136Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1704:2917] TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2024-11-21T23:10:30.292561Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:30.292621Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jd8fvhqh7m0vpb918ypp6hn4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNmZDM2YTgtZjk3NDhjMGQtMmI3NmU0OTktYmJjN2ZhYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T23:10:3 ... OTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:31.666387Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 1, snapshot: {0, 0} 2024-11-21T23:10:31.667541Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1949 RawX2: 4294970478 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004\207\203\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\016\014Arg\000\002)\211\002?\022\204\214\002(KqpEffects\000)\211\010?\036\213\010\203\010\203\010\203\005@\203\010\204?\n\210\203\004\203\004\203\0144KqpUpsertRows\000\013?*\003?\"\177\000\001\205\000\000\000\000\001\003?$\004\003?&\000\003?(\002\017)\211\002?,?\014 Iterator\000)\211\004?\014?\016\203\004\030Member\000?\032\003?D\000\002\004\000\006\010\002?2\003\203\004\004\003\203\004\002\003\003?4\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\t\000\002\n\n" Rows: 1 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\005\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "default" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=" } RequestContext { key: "TraceId" value: "01jd8fvm5710z0ks52rb77s5rf" } EnableSpilling: false 2024-11-21T23:10:31.667691Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2024-11-21T23:10:31.667790Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2024-11-21T23:10:31.667892Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T23:10:31.667951Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1949:3182] TxId: 281474976715683. 
Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T23:10:31.667994Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2024-11-21T23:10:31.668049Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2024-11-21T23:10:31.668096Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2024-11-21T23:10:31.683909Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: RESPONSE_DATA, error: 2024-11-21T23:10:31.684052Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2024-11-21T23:10:31.684211Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: GENERIC_ERROR Issues { message: "Error executing transaction: transaction failed." severity: 1 } Result { Stats { CpuTimeUs: 260 } } , to ActorId: [1:1939:3182] 2024-11-21T23:10:31.684320Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2024-11-21T23:10:31.684423Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
terminate execution. 2024-11-21T23:10:31.684472Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1949:3182] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T23:10:31.684639Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, ActorId: [1:1939:3182], ActorState: ExecuteState, TraceId: 01jd8fvm5710z0ks52rb77s5rf, Create QueryResponse for error on request, msg: 2024-11-21T23:10:31.685110Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1954:3182] TxId: 0. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2024-11-21T23:10:31.685182Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2024-11-21T23:10:31.685327Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715684. Resolved key sets: 0 2024-11-21T23:10:31.685408Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:10:31.685444Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715684. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, pool: Data, localComputeTasks: 0, snapshot: {0, 0} 2024-11-21T23:10:31.685487Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2024-11-21T23:10:31.685516Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2024-11-21T23:10:31.685596Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:10:31.685629Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1954:3182] TxId: 281474976715684. 
Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2024-11-21T23:10:31.685673Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1954:3182] TxId: 281474976715684. Ctx: { TraceId: 01jd8fvm5710z0ks52rb77s5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg1ODJiMDktN2FjMGIzYzUtOTJiZTA0YjgtNmU5NmUxYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> BasicUsage::ConnectToYDB >> TxUsage::WriteToTopic_Two_WriteSession |82.1%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestRewriteThenLastValue >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2024-11-21T23:10:04.684959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873794030803401:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:04.686337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00118c/r3tmp/tmpOSBZOK/pdisk_1.dat 2024-11-21T23:10:05.130777Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:05.132175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:05.132242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25709, node 1 2024-11-21T23:10:05.205896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:05.236098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:05.236677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:05.236698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:05.236762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:10:05.236827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:10:05.414985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:05.415005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:05.415011Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:05.415096Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:05.971721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:05.995703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:05.995764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:06.007146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:10:06.007570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:10:06.007587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:10:06.014287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:10:06.014315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:10:06.019199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:06.025986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:06.030019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230606078, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:06.030053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:10:06.030369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:10:06.032491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:06.032693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:06.032774Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:10:06.032854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:10:06.032900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:10:06.032950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:10:06.035454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:10:06.035504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:10:06.035523Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:10:06.035596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:10:10.182764Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873820202902430:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:10.182831Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00118c/r3tmp/tmpZa9889/pdisk_1.dat 2024-11-21T23:10:10.458903Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:10.473785Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:10.473868Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:10.480376Z node 4 
:HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21380, node 4 2024-11-21T23:10:10.574066Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:10:10.574087Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:10:10.574094Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:10:10.574187Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:10.818856Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:10.819236Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:10.819269Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:10.827325Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:10:10.827505Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:10:10.827519Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:10:10.835180Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:10:10.835215Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:10:10.837564Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:10.838769Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:10:10.845854Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230610887, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:10.845892Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:10:10.846161Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:10:10.851614Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:10.851808Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:10.851861Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:10:10.851948Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:10:10.851990Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:10:10.852055Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:10:10.853207Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:10:10.853255Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:10:10.853272Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:10:10.853359Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:10:15.183469Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439873820202902430:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:15.183570Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:10:25.407536Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:10:25.407559Z node 4 :IMPORT WARN: Table profiles were not loaded >> CompressExecutor::TestExecutorMemUsage [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] |82.1%| [TA] 
$(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> TKeyValueTest::TestRenameWorks >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:56.012522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:56.012639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:56.012678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:56.012712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:56.012783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:56.012813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:56.012880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:56.013257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:56.083336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:56.083402Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:56.092674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:56.093122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2024-11-21T23:08:56.093315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:56.114381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:56.114813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:56.115596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:56.116289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:56.120225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.121645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:56.121704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.121779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:56.121824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:56.121857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:56.122107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:56.128932Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:56.255474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:56.255760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.255989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:56.256240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:56.256301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.259394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:56.259544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER 
DATABASE, path: //MyRoot 2024-11-21T23:08:56.259838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.259903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:56.259973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:56.260010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:56.263711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.263776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:56.263804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:56.265522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.265562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.265609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:56.265642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:56.268597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:56.270414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:56.270637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:56.271600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:56.271717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:56.271762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:56.271994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:56.272032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:56.272208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:56.272287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:56.274198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:56.274249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:56.274441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.274493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:56.274895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:56.274938Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:56.275278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:56.275315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:56.275373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:56.275440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:56.275490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:56.275525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:56.275600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:56.275645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:56.275678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:10:35.678244Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:10:35.678571Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 344us result status StatusSuccess 2024-11-21T23:10:35.679426Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } 
Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:10:35.704811Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1082:2829] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:10:35.704948Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1004:2829] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T23:10:35.705120Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1082:2829] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732230635631703 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732230635631703 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1732230635631703 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:10:35.708743Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1082:2829] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2024-11-21T23:10:35.708871Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1004:2829] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart >> TYardTest::TestLogWriteCutUnequal [GOOD] >> TYardTest::TestLogMultipleWriteRead >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T23:07:43.858502Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732230463858470 2024-11-21T23:07:44.451178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873194331152114:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:44.451224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:44.583635Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873195274577270:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:44.770788Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:44.760034Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001394/r3tmp/tmpUwF6Tx/pdisk_1.dat 2024-11-21T23:07:44.883942Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:45.348559Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:45.462209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:45.462333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:45.465910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:45.465982Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:45.468307Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:45.477006Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:07:45.477109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:45.481163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5962, node 1 2024-11-21T23:07:45.698584Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001394/r3tmp/yandex0OYzx4.tmp 2024-11-21T23:07:45.698603Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001394/r3tmp/yandex0OYzx4.tmp 2024-11-21T23:07:45.698713Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001394/r3tmp/yandex0OYzx4.tmp 2024-11-21T23:07:45.698808Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:07:45.795029Z INFO: TTestServer started on Port 13628 GrpcPort 5962 TClient is connected to server localhost:13628 PQClient connected to localhost:5962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:46.450332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2024-11-21T23:07:49.453912Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873194331152114:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:49.454001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:49.528591Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873195274577270:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:49.528656Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:50.960664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873221044381231:2288], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:50.960757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873221044381218:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:50.961101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:50.974542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T23:07:51.038722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873221044381234:2289], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T23:07:51.470549Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873224395924281:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:51.474972Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873225339348565:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:51.476522Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjRlMWVlYmYtOTRhZGM4YmMtNGZiOGJlYS1jODI4YTRj, ActorId: [2:7439873221044381193:2282], ActorState: ExecuteState, TraceId: 01jd8fpq9b5jmvvs3a96vhgwc5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:51.487608Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjJhMjdhNi1hYjNmNzFjNy04OTkwZDljNC0zZTYyODk5Ng==, ActorId: [1:7439873224395924255:2306], ActorState: ExecuteState, TraceId: 01jd8fpqd099pe2x9skqa98n6d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:51.492162Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:51.506965Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:51.509312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:07:51.984922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:07:52.219025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:5962", true, true, 1000); 2024-11-21T23:07:53.011730Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8fprwk4vgqczdmn7xv5x05, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQxNWE2NGEtZTQyNTI3OTktZDM0MmZhZjQtMmJkMTkxOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:07:53.091144Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { TxMeta { } YdbResults { columns { name: "C.name" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.balancer" type { optional_type { item { type_id: UTF8 } } } } columns { name: "C.local" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.enabled" type { optional_type { item { type_id: BOOL } } } } columns { name: "C.weight" type { optional_type { item { type_id: UINT64 } } } } columns { name: "V.version" type { optional_type { item { type_id: INT64 } } } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 341 } === CheckClustersList. Subcribe to ClusterTracker from [1:7439873232985859318:2992] === CheckClustersList. Ok 2024-11-21T23:07:59.953476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:08:00.362676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:08:00.362709Z node 1 :IMPOR ... GroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T23:10:32.266754Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T23:10:32.266802Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T23:10:32.267798Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T23:10:32.268028Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:40806 2024-11-21T23:10:32.268056Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:40806 proto=v1 topic=test-topic durationSec=0 2024-11-21T23:10:32.268072Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:10:32.270116Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T23:10:32.270306Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T23:10:32.270323Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T23:10:32.270335Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND 
Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T23:10:32.270360Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873914459552237:2542] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T23:10:32.278107Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873914459552237:2542] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T23:10:32.502518Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710700. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:10:32.502700Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439873914459552249:2544] TxId: 281474976710700. Ctx: { TraceId: 01jd8fvmtpbxkrxsfcvf6xgmcn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MTI3MGMxNDMtODZkODNkMC01YTVkMmIwZC1lOTc3NDEyMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:10:32.503150Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MTI3MGMxNDMtODZkODNkMC01YTVkMmIwZC1lOTc3NDEyMw==, ActorId: [15:7439873914459552238:2544], ActorState: ExecuteState, TraceId: 01jd8fvmtpbxkrxsfcvf6xgmcn, Create QueryResponse for error on request, msg: 2024-11-21T23:10:32.504590Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7439873914459552237:2542] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MTI3MGMxNDMtODZkODNkMC01YTVkMmIwZC1lOTc3NDEyMw==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fvmtqc05q74frnpxzjaeq" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T23:10:32.504728Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MTI3MGMxNDMtODZkODNkMC01YTVkMmIwZC1lOTc3NDEyMw==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fvmtqc05q74frnpxzjaeq" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: Test retry state: get retry delay 2024-11-21T23:10:32.505748Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=MTI3MGMxNDMtODZkODNkMC01YTVkMmIwZC1lOTc3NDEyMw==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fvmtqc05q74frnpxzjaeq" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T23:10:32.505783Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Write session will restart in 2.000000s 2024-11-21T23:10:32.505913Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Write session: Do CDS request 2024-11-21T23:10:32.505946Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Do schedule cds request after 2000 ms 2024-11-21T23:10:32.505123Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD 2024-11-21T23:10:32.863613Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715683. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:32.863764Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439873917472263742:2474] TxId: 281474976715683. Ctx: { TraceId: 01jd8fvn5c2ajvgc68a8g90w0e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YmYzMjYxYjQtNDM1YTM1NTAtMTUzMWQ2N2MtNWVkYzYyMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:32.864198Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=YmYzMjYxYjQtNDM1YTM1NTAtMTUzMWQ2N2MtNWVkYzYyMWQ=, ActorId: [16:7439873917472263739:2474], ActorState: ExecuteState, TraceId: 01jd8fvn5c2ajvgc68a8g90w0e, Create QueryResponse for error on request, msg: 2024-11-21T23:10:32.871157Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fvn5dctmcayz23hcfdv4s" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2024-11-21T23:10:33.254909Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Write session: close. Timeout = 0 ms 2024-11-21T23:10:33.255035Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Write session will now close 2024-11-21T23:10:33.255090Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Write session: aborting 2024-11-21T23:10:33.256000Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T23:10:33.256050Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|32352b48-d88aa5ff-df41d2f8-bb9424f9_0] Write session: destroy 2024-11-21T23:10:33.321790Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710702. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:33.321960Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439873918754519616:2546] TxId: 281474976710702. Ctx: { TraceId: 01jd8fvn36dntqbt6fhjkqp5ej, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=N2VhYmMwNy0yODI2Nzg4OC01ZDYwMGI3Mi1kOTcyOTJh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:33.322555Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=N2VhYmMwNy0yODI2Nzg4OC01ZDYwMGI3Mi1kOTcyOTJh, ActorId: [15:7439873914459552293:2546], ActorState: ExecuteState, TraceId: 01jd8fvn36dntqbt6fhjkqp5ej, Create QueryResponse for error on request, msg: 2024-11-21T23:10:33.325960Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fvnn0a6s88peb8dxkg7kx" } } YdbStatus: UNAVAILABLE ConsumedRu: 371 } 2024-11-21T23:10:33.570323Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715685. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T23:10:33.570584Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439873921767231100:2476] TxId: 281474976715685. Ctx: { TraceId: 01jd8fvn78e1wx7qjesv2egqe2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZmNkMjBlOTUtZDBjYThhMDYtM2ZkNTAwNWYtNzczYjU4ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T23:10:33.571223Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=ZmNkMjBlOTUtZDBjYThhMDYtM2ZkNTAwNWYtNzczYjU4ODA=, ActorId: [16:7439873917472263759:2476], ActorState: ExecuteState, TraceId: 01jd8fvn78e1wx7qjesv2egqe2, Create QueryResponse for error on request, msg: 2024-11-21T23:10:33.579699Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd8fvnvb7dwphjqtj7nrtajw" } } YdbStatus: UNAVAILABLE ConsumedRu: 416 } 2024-11-21T23:10:33.656819Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710704. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:33.656986Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439873918754519704:2554] TxId: 281474976710704. Ctx: { TraceId: 01jd8fvnxs46m9msb93sw2b8jx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MjE0NDRlMzktMmQ5OTUwN2UtNmYzY2I1ODItZmEzM2VhNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:33.657453Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MjE0NDRlMzktMmQ5OTUwN2UtNmYzY2I1ODItZmEzM2VhNjU=, ActorId: [15:7439873918754519701:2554], ActorState: ExecuteState, TraceId: 01jd8fvnxs46m9msb93sw2b8jx, Create QueryResponse for error on request, msg: 2024-11-21T23:10:33.658696Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fvnxs46m9msb93wggxk25" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |82.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> TYardTest::TestLogMultipleWriteRead [GOOD] >> TYardTest::TestLogWriteLsnConsistency >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict [GOOD] >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> DataShardSnapshots::MvccSnapshotLockedWrites [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> DataShardSnapshots::LockedWriteCleanupOnSplit >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> TYardTest::TestLogWriteLsnConsistency [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2024-11-21T23:10:37.710023Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T23:10:37.712635Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2024-11-21T23:10:37.718881Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T23:10:37.718963Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2024-11-21T23:10:37.724877Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T23:10:37.724976Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult. 
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2024-11-21T23:10:37.724719Z ErrorReason# >> TYardTest::TestLotsOfTinyAsyncLogLatency >> TSettingsValidation::ValidateSettingsFailOnStart [GOOD] >> TxUsage::SessionAbort >> TKeyValueTest::TestBasicWriteRead >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> YdbOlapStore::LogWithUnionAllDescending |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart [GOOD] >> TYardTest::TestLotsOfTinyAsyncLogLatency [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TxUsage::WriteToTopic_Two_WriteSession [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart [GOOD] >> BasicUsage::ConnectToYDB [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart >> TYardTest::TestLogLatency >> BasicUsage::ReadWithoutConsumerWithRestarts >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> DataShardSnapshots::LockedWriteCleanupOnCopyTable |82.1%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |82.1%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_core/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T23:07:55.658472Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732230475658440 2024-11-21T23:07:56.587700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873244621530079:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:56.587760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001017/r3tmp/tmpRImaWI/pdisk_1.dat 2024-11-21T23:07:57.257009Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:57.354321Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:57.499350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:57.619059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:58.135433Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:58.143592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:58.143730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:58.146002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:58.146091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:58.154874Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:07:58.155020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:58.157135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24227, node 1 2024-11-21T23:07:58.485066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001017/r3tmp/yandexZ1AO4Q.tmp 2024-11-21T23:07:58.485092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001017/r3tmp/yandexZ1AO4Q.tmp 2024-11-21T23:07:58.485271Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001017/r3tmp/yandexZ1AO4Q.tmp 2024-11-21T23:07:58.485376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:07:58.692937Z INFO: TTestServer started on Port 11402 GrpcPort 24227 TClient is connected to server localhost:11402 PQClient connected to localhost:24227 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:59.434078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:08:01.591289Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873244621530079:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:08:01.591370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:08:04.038457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873280140570521:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:04.038564Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:04.039156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873280140570548:2287], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:08:04.051453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2024-11-21T23:08:04.117579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873280140570550:2288], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2024-11-21T23:08:04.585941Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873278981269510:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:08:04.589621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:08:04.588926Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873280140570585:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:08:04.590418Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjIxZGYyZmMtZTRkNTQxN2MtNzMxODAzMWEtNTA1NTQ5NmM=, ActorId: [1:7439873278981269467:2306], ActorState: ExecuteState, TraceId: 01jd8fq44k15vf07qr3sehnwc0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:08:04.599101Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDJmNjM1MzAtMzU4YWFkNmUtNmYzOGJkOWMtYTA2YmUyZTE=, ActorId: [2:7439873280140570519:2283], ActorState: ExecuteState, TraceId: 01jd8fq41z3ec4d6pqpnwn928q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:08:04.612775Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:08:04.614572Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:08:04.916162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:08:05.231681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:24227", true, true, 1000); 2024-11-21T23:08:05.799737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8fq5dz0w6q73ewgjvvghyb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmY3M2NmNjktMmMyODdiNWUtZGI5NjNiOTMtNmNkYTA0NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439873283276237227:2996] === CheckClustersList. Ok 2024-11-21T23:08:11.050821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:24227 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:08:11.276410Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:24227 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710676 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster ... sionId: ydb://session/3?node_id=16&id=ZGNlYjM0OTYtYTg0Yjc1Y2ItMTljZmIyODgtZTZlZTg3NGE=, ActorId: [16:7439873942000417956:2461], ActorState: ExecuteState, TraceId: 01jd8fvtj1cbh7t67q67es8qxf, Create QueryResponse for error on request, msg: 2024-11-21T23:10:39.006326Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd8fvv7eesq7xxzmxrajn5rr" } } YdbStatus: UNAVAILABLE ConsumedRu: 439 } 2024-11-21T23:10:39.290711Z :INFO: [/Root] MessageGroupId [test-message-group-id] Running cds request ms 2024-11-21T23:10:39.298366Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Got CDS response: write_sessions_clusters { clusters { endpoint: "localhost:26900" name: "dc1" available: true } primary_cluster_selection_reason: CLIENT_LOCATION } version: 1 2024-11-21T23:10:39.298450Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Start write session. 
Will connect to endpoint: localhost:26900 2024-11-21T23:10:39.308245Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T23:10:39.337789Z node 15 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T23:10:39.337827Z node 15 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T23:10:39.338432Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T23:10:39.338644Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:53766 2024-11-21T23:10:39.338669Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:53766 proto=v1 topic=test-topic durationSec=0 2024-11-21T23:10:39.338683Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:10:39.342836Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T23:10:39.343021Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T23:10:39.343038Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T23:10:39.343053Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T23:10:39.343081Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873946403833999:2543] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T23:10:39.347181Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7439873946403833999:2543] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2024-11-21T23:10:39.530795Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710698. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:10:39.530933Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439873946403834012:2545] TxId: 281474976710698. Ctx: { TraceId: 01jd8fvvqk5vz37n9mzhhqatan, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=OTQ1MDcyNGMtYzEyMzE5NS00M2YyNzQ2MS1kOTExNzk0NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:10:39.531365Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=OTQ1MDcyNGMtYzEyMzE5NS00M2YyNzQ2MS1kOTExNzk0NQ==, ActorId: [15:7439873946403834000:2545], ActorState: ExecuteState, TraceId: 01jd8fvvqk5vz37n9mzhhqatan, Create QueryResponse for error on request, msg: 2024-11-21T23:10:39.533679Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7439873946403833999:2543] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=OTQ1MDcyNGMtYzEyMzE5NS00M2YyNzQ2MS1kOTExNzk0NQ==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fvvqm5h5ngz1mj8aa7s2b" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T23:10:39.533824Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=OTQ1MDcyNGMtYzEyMzE5NS00M2YyNzQ2MS1kOTExNzk0NQ==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fvvqm5h5ngz1mj8aa7s2b" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2024-11-21T23:10:39.534368Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2024-11-21T23:10:39.535648Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=OTQ1MDcyNGMtYzEyMzE5NS00M2YyNzQ2MS1kOTExNzk0NQ==" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fvvqm5h5ngz1mj8aa7s2b" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T23:10:39.535682Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Write session will restart in 2.000000s 2024-11-21T23:10:39.535791Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Write session: Do CDS request 2024-11-21T23:10:39.535814Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Do schedule cds request after 2000 ms 2024-11-21T23:10:39.606841Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710699. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T23:10:39.606990Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439873946403834027:2536] TxId: 281474976710699. Ctx: { TraceId: 01jd8fvv8p3b06f3hffte25ghk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MmRkNTViMjQtNDYxZDg5ZTktZjM2YzA2Y2MtNTFiNWEwN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T23:10:39.607482Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MmRkNTViMjQtNDYxZDg5ZTktZjM2YzA2Y2MtNTFiNWEwN2M=, ActorId: [15:7439873942108866680:2536], ActorState: ExecuteState, TraceId: 01jd8fvv8p3b06f3hffte25ghk, Create QueryResponse for error on request, msg: 2024-11-21T23:10:39.610653Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd8fvvsvav4v3m5p2692pyaa" } } YdbStatus: UNAVAILABLE ConsumedRu: 361 } 2024-11-21T23:10:39.676787Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710700. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:39.676947Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7439873946403834101:2549] TxId: 281474976710700. Ctx: { TraceId: 01jd8fvvx37mv29v4q8p03s4ce, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=YTE2YTQ2MmItM2IyNWFlM2YtYWY4MGJiMTktY2U4ZThjMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:39.677360Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=YTE2YTQ2MmItM2IyNWFlM2YtYWY4MGJiMTktY2U4ZThjMWE=, ActorId: [15:7439873946403834098:2549], ActorState: ExecuteState, TraceId: 01jd8fvvx37mv29v4q8p03s4ce, Create QueryResponse for error on request, msg: 2024-11-21T23:10:39.679229Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fvvx43wgzv73f1smy1jdx" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2024-11-21T23:10:40.305667Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Write session: close. Timeout = 0 ms 2024-11-21T23:10:40.305748Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Write session will now close 2024-11-21T23:10:40.305855Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Write session: aborting 2024-11-21T23:10:40.306768Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T23:10:40.306822Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|39995fdb-d682ccbf-d0211265-9fb8bfe4_0] Write session: destroy 2024-11-21T23:10:40.486031Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720684. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:40.486180Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7439873950590352692:2479] TxId: 281474976720684. Ctx: { TraceId: 01jd8fvwmxfk0mz8h0txjwmwh7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=NjlhMjQzYmUtYTEzYTIzMGQtNjljYTUxNjktNTUyYzg3ZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:10:40.486619Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=NjlhMjQzYmUtYTEzYTIzMGQtNjljYTUxNjktNTUyYzg3ZDY=, ActorId: [16:7439873950590352689:2479], ActorState: ExecuteState, TraceId: 01jd8fvwmxfk0mz8h0txjwmwh7, Create QueryResponse for error on request, msg: 2024-11-21T23:10:40.487838Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fvwmxfk0mz8h0tyq64f4g" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> TxUsage::WriteToTopic_Demo_5 >> TYardTest::TestLogLatency [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_1 [GOOD] >> TYardTest::TestMultiYardLogLatency >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> TxUsage::WriteToTopic_Demo_2 [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TxUsage::WriteToTopic_Demo_10 >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> TKeyValueTest::TestLargeWriteAndDelete >> TYardTest::TestMultiYardLogLatency [GOOD] >> TYardTest::TestMultiYardFirstRecordToKeep >> TxUsage::WriteToTopic_Demo_18_RestartNo >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> TxUsage::WriteToTopic_Demo_4 [GOOD] >> TYardTest::TestMultiYardFirstRecordToKeep [GOOD] >> TxUsage::SessionAbort [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> TxUsage::WriteToTopic_Demo_3 [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> TYardTest::TestMultiYardLogMultipleWriteRead >> DataShardSnapshots::LockedWriteCleanupOnCopyTable [GOOD] >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TPersQueueTest::DisableDeduplication [GOOD] >> TxUsage::WriteToTopic_Demo_34 >> TxUsage::TwoSessionOneConsumer >> DataShardSnapshots::MvccSnapshotReadLockedWrites >> DataShardSnapshots::UncommittedWriteRestartDuringCommit |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts >> TxUsage::WriteToTopic_Demo_26 >> TKeyValueTest::TestBasicWriteRead [GOOD] >> LocalPartition::Restarts [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart [GOOD] >> DataShardTxOrder::RandomPointsAndRanges [GOOD] >> LocalPartition::DiscoveryServiceBadPort >> TYardTest::TestMultiYardLogMultipleWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> DataShardSnapshots::DelayedWriteReadableAfterSplit >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> 
DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |82.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |82.1%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] >> TYardTest::TestLogOverwriteRestarts >> DataShardSnapshots::MvccSnapshotReadLockedWrites [GOOD] >> BasicUsage::WriteRead [GOOD] >> Describe::Basic >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:51.107858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:51.107938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.107977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:51.108005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:51.108041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:51.108061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:51.108115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:51.108359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:51.178826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:51.178889Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:51.191821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:51.192259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:51.192426Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:51.204369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:51.204719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:51.205283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.208169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:51.213884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.215149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:51.215209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.215278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:51.215333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:51.215373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:51.215617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:51.243393Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:51.363031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:51.363246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.363440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:51.363630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:51.363678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.367065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.367222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2024-11-21T23:08:51.367494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.367566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:51.367630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:51.367666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:51.370614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.370689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:51.370752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:51.377207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.377264Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.377317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.377363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.380807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:51.382765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:51.383006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:51.384084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:51.384245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:51.384301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.384648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:51.384732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:51.384984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 1 2024-11-21T23:08:51.385072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:51.387211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:51.387260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:51.387410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:51.387446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:51.387750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:51.387786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:51.387867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:51.387899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.387957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:51.388003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:51.388037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:51.388063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:51.388117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:51.388150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:51.388181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publica ... 
DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2024-11-21T23:10:55.528863Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T23:10:55.528936Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2024-11-21T23:10:55.529073Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T23:10:55.529124Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:10:55.529213Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 326 RawX2: 416611830026 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T23:10:55.529274Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:10:55.529317Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T23:10:55.532165Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:10:55.532324Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:10:55.534664Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:55.535153Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2646 } } 2024-11-21T23:10:55.535200Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2024-11-21T23:10:55.535345Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2646 } } 2024-11-21T23:10:55.535449Z node 97 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 2646 } } 
2024-11-21T23:10:55.536529Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T23:10:55.536583Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2024-11-21T23:10:55.536708Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T23:10:55.536756Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:10:55.536841Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 427 RawX2: 416611830111 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2024-11-21T23:10:55.536898Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:10:55.536936Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:55.536976Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:10:55.537019Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:10:55.537050Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2024-11-21T23:10:55.539225Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:55.540300Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:55.540630Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:55.540687Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2024-11-21T23:10:55.540739Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T23:10:55.540781Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2024-11-21T23:10:55.540874Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T23:10:55.540914Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2024-11-21T23:10:55.544334Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:10:55.544393Z node 97 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2024-11-21T23:10:55.544503Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2024-11-21T23:10:55.544541Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T23:10:55.544595Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2024-11-21T23:10:55.544648Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2024-11-21T23:10:55.544693Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T23:10:55.544728Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T23:10:55.544877Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:10:55.544918Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T23:10:55.548216Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T23:10:55.548270Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T23:10:55.548647Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T23:10:55.548746Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T23:10:55.548780Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:522:2486] TestWaitNotification: OK eventTxId 1003 2024-11-21T23:10:55.549258Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:10:55.549479Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 278us result status StatusSuccess 2024-11-21T23:10:55.549964Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: 2024-11-21T23:10:28.090859Z node 1 :BS_PROXY_GET ERROR: [47ad982f08e135f5] Response# TEvGetResult {Status# DEADLINE ResponseSz# 1 {[72057594037927937:2:1:2:1:5:0] DEADLINE Size# 0 RequestedSize# 5} ErrorReason# "status# DEADLINE from# [0:1:0:0:0]"} Marker# BPG29 2024-11-21T23:10:28.091051Z node 1 :BS_VDISK_PATCH ERROR: VDISK[0:_:0:0:0]: TEvVMovedPatch: failed on VGet; OriginalBlobId# [72057594037927937:2:1:2:1:5:0] PatchedBlobId# [72057594037927937:2:1:2:4:5:0] ErrorReason# Couldn't get the original blob; GetStatus# DEADLINE GetResponseStatus# DEADLINE GetErrorReason# status# DEADLINE from# [0:1:0:0:0] Marker# BSVSP01 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:143:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:146:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:145:2167] Leader for TabletID 72057594037927937 is [5:148:2168] sender: [5:149:2057] recipient: [5:145:2167] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:148:2168] Leader for TabletID 72057594037927937 is [5:148:2168] sender: [5:218:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2172] Leader for TabletID 72057594037927937 is [7:153:2173] sender: [7:154:2057] recipient: [7:150:2172] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2173] Leader for TabletID 72057594037927937 is [7:153:2173] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:151:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:153:2174] Leader for TabletID 72057594037927937 is [8:156:2175] sender: [8:157:2057] recipient: [8:153:2174] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:156:2175] Leader for TabletID 72057594037927937 is [8:156:2175] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:156:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:156:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:153:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:156:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:155:2176] Leader for TabletID 72057594037927937 is [10:158:2177] sender: [10:159:2057] recipient: [10:155:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:158:2177] Leader for TabletID 72057594037927937 is [10:158:2177] sender: [10:228:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:154:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:158:2057] recipient: [11:156:2176] Leader for TabletID 72057594037927937 is [11:159:2177] sender: [11:160:2057] recipient: [11:156:2176] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:159:2177] Leader for TabletID 72057594037927937 is [11:159:2177] sender: [11:229:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:157:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:160:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:161:2057] recipient: [12:159:2179] Leader for TabletID 72057594037927937 is [12:162:2180] sender: [12:163:2057] recipient: [12:159:2179] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! 
new actor is[12:162:2180] Leader for TabletID 72057594037927937 is [12:162:2180] sender: [12:215:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:161:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:163:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:165:2057] recipient: [13:164:2183] Leader for TabletID 72057594037927937 is [13:166:2184] sender: [13:167:2057] recipient: [13:164:2183] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:166:2184] Leader for TabletID 72057594037927937 is [13:166:2184] sender: [13:219:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:166:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:169:2057] recipient: [14:168:2188] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:170:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:172:2057] recipient: [14:168:2188] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:171:2189] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:241:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:166:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:168:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:170:2057] recipient: [15:169:2188] Leader for TabletID 72057594037927937 is [15:171:2189] sender: [15:172:2057] recipient: [15:169:2188] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! 
new actor is[15:171:2189] Leader for TabletID 72057594037927937 is [15:171:2189] sender: [15:241:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |82.1%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> DataShardSnapshots::RepeatableReadAfterSplitRace |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestGetStatusWorks >> TxUsage::WriteToTopic_Demo_34 [GOOD] >> TxUsage::TwoSessionOneConsumer [GOOD] |82.2%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |82.2%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TxUsage::WriteToTopic_Demo_35 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2024-11-21T23:08:34.316111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:34.316164Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:08:34.316244Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:08:34.329251Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:08:34.329728Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:130:2153] 2024-11-21T23:08:34.330037Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:34.339942Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:130:2153]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:08:34.379967Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:34.380511Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:08:34.381916Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2024-11-21T23:08:34.382034Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2024-11-21T23:08:34.382095Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2024-11-21T23:08:34.382589Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:08:34.405115Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2024-11-21T23:08:34.405284Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:08:34.405419Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:194:2196] 2024-11-21T23:08:34.405467Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2024-11-21T23:08:34.405500Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2024-11-21T23:08:34.405532Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:34.405811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:34.405860Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:34.405988Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2024-11-21T23:08:34.406072Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2024-11-21T23:08:34.406275Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:34.406313Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:08:34.406346Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2024-11-21T23:08:34.406375Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:08:34.406489Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:08:34.406520Z node 1 :TX_DATASHARD INFO: No tx to execute at 
9437184 TxInFly 0 2024-11-21T23:08:34.406557Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:08:34.467910Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:206:2207], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:34.467973Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:34.468021Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:204:2206], serverId# [1:206:2207], sessionId# [0:0:0] 2024-11-21T23:08:34.470674Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:97:2132], Recipient [1:130:2153]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 97 RawX2: 4294969428 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2024-11-21T23:08:34.470731Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:08:34.470830Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2024-11-21T23:08:34.470980Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2024-11-21T23:08:34.471043Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2024-11-21T23:08:34.471110Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2024-11-21T23:08:34.471155Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:34.471185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2024-11-21T23:08:34.471216Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2024-11-21T23:08:34.471245Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:34.471504Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2024-11-21T23:08:34.471532Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2024-11-21T23:08:34.471562Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2024-11-21T23:08:34.471606Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:34.471661Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2024-11-21T23:08:34.471684Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2024-11-21T23:08:34.471713Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2024-11-21T23:08:34.471749Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:34.471790Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2024-11-21T23:08:34.494903Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2024-11-21T23:08:34.494979Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2024-11-21T23:08:34.495027Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2024-11-21T23:08:34.495069Z 
node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2024-11-21T23:08:34.495140Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2024-11-21T23:08:34.495652Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:213:2213], Recipient [1:130:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:34.495702Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:08:34.495744Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:213:2213], sessionId# [0:0:0] 2024-11-21T23:08:34.495884Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:97:2132], Recipient [1:130:2153]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2024-11-21T23:08:34.495915Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:08:34.496024Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2024-11-21T23:08:34.496064Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:34.496096Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2024-11-21T23:08:34.496129Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2024-11-21T23:08:34.500075Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 97 RawX2: 4294969428 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2024-11-21T23:08:34.500144Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:08:34.500359Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:130:2153], Recipient [1:130:2153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:34.500398Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:08:34.500441Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:08:34.500511Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:08:34.500567Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:08:34.500608Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2024-11-21T23:08:34.500643Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2024-11-21T23:08:34.500680Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:34.500711Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2024-11-21T23:08:34.500741Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2024-11-21T23:08:34.500768Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2024-11-21T23:08:34.500946Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2024-11-21T23:08:34.500982Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:34.501007Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2024-11-21T23:08:34.501027Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2024-11-21T23:08:34.501048Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2024-11-21T23:08:34.501102Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:08:34.501141Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2024-11-21T23:08:34.501186Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2024-11-21T23:08:34.501217Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2024-11-21T23:08:34.501271Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2024-11-21T23:08:34.501301Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2024-11-21T23:08:34.501333Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2024-11-21T23:08:34.501373Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2024-11-21T23:08:34.501405Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit BuildAndWaitDependencies 2024-11-21T23:08:34.501438Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit Ma ... s: 0, InvisibleRowSkips: 0} 2024-11-21T23:10:52.563337Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2024-11-21T23:10:52.563374Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit ExecuteDataTx 2024-11-21T23:10:52.563409Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2024-11-21T23:10:52.563447Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2024-11-21T23:10:52.563741Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is DelayComplete 2024-11-21T23:10:52.563779Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2024-11-21T23:10:52.563811Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2024-11-21T23:10:52.563845Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2024-11-21T23:10:52.563886Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437185 is Executed 2024-11-21T23:10:52.563914Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437185 executing on unit CompletedOperations 2024-11-21T23:10:52.563944Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437185 has finished 2024-11-21T23:10:52.563985Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:52.564014Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2024-11-21T23:10:52.564069Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2024-11-21T23:10:52.564102Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2024-11-21T23:10:52.589148Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 
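The TX_DATASHARD trace above walks a single operation through a chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, PlanQueue, LoadTxDetails, BuildAndWaitDependencies, ExecuteDataTx, CompleteOperation, CompletedOperations), where each unit reports a status such as Executed, ExecutedNoMoreRestarts, DelayComplete, or "not ready". The following is a minimal, self-contained C++ sketch of that pipeline pattern, included only to make the trace easier to follow; all names in it are invented for illustration and it is not the actual NKikimr::NDataShard implementation.

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Possible outcomes of running one execution unit (illustrative names only).
enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Execute;   // runs inside the local tablet transaction
    std::function<void()> Complete;     // replayed later for DelayComplete units
};

// Advance one operation through the unit chain, mirroring the
// "Trying to execute ... / Execution status ... / Advance execution plan ..." lines.
void RunPipeline(const std::vector<TUnit>& units) {
    std::vector<const TUnit*> delayed;
    for (const auto& unit : units) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        const EStatus status = unit.Execute();
        if (status == EStatus::NotReady) {
            // e.g. WaitForPlan: the operation parks here until an external
            // event (TEvPlanStep in the trace) makes it runnable again.
            std::cout << "Operation is not ready to execute on unit " << unit.Name << "\n";
            break;
        }
        if (status == EStatus::DelayComplete) {
            delayed.push_back(&unit);   // executed, but its visible effects are
                                        // published only after the local commit
        }
        std::cout << "Advance execution plan, executed on unit " << unit.Name << "\n";
    }
    // Corresponds to the later "TTxProposeTransactionBase::Complete ..." /
    // "Complete execution for ... on unit ..." lines in the trace.
    for (const TUnit* unit : delayed) {
        unit->Complete();
    }
}

int main() {
    const std::vector<TUnit> units = {
        {"CheckSchemeTx", [] { return EStatus::ExecutedNoMoreRestarts; }, [] {}},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; },
         [] { std::cout << "Complete execution on unit StoreSchemeTx\n"; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; },
         [] { std::cout << "Complete execution on unit FinishPropose\n"; }},
        {"WaitForPlan", [] { return EStatus::NotReady; }, [] {}},
    };
    RunPipeline(units);
    return 0;
}

In this simplified model, units that report DelayComplete (StoreSchemeTx and FinishPropose in the trace) have their completion callbacks replayed once the enclosing local transaction commits, which is what the earlier "Complete execution for [0:1] at 9437184 on unit ..." lines correspond to, while the operation itself stays parked on WaitForPlan until a plan step arrives.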
2024-11-21T23:10:52.589227Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:402] at 9437186 on unit CompleteOperation 2024-11-21T23:10:52.589295Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 402] from 9437186 at tablet 9437186 send result to client [4:97:2132], exec latency: 3 ms, propose latency: 5 ms 2024-11-21T23:10:52.589362Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2024-11-21T23:10:52.589399Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:10:52.589436Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2024-11-21T23:10:52.589683Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:10:52.589714Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit StoreAndSendOutRS 2024-11-21T23:10:52.589742Z node 4 :TX_DATASHARD DEBUG: Send RS 400 at 9437186 from 9437186 to 9437184 txId 403 2024-11-21T23:10:52.589790Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2024-11-21T23:10:52.589813Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit CompleteOperation 2024-11-21T23:10:52.589849Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437186 at tablet 9437186 send result to client [4:97:2132], exec latency: 0 ms, propose latency: 2 ms 2024-11-21T23:10:52.589891Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2024-11-21T23:10:52.589916Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2024-11-21T23:10:52.590680Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:434:2384], Recipient [4:329:2302]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2024-11-21T23:10:52.590725Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:52.590758Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 402 2024-11-21T23:10:52.591061Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [4:434:2384], Recipient [4:227:2222]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2024-11-21T23:10:52.591099Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T23:10:52.591129Z node 4 :TX_DATASHARD DEBUG: Receive RS at 9437184 source 9437186 dest 9437184 producer 9437186 txId 403 2024-11-21T23:10:52.591194Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437184 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2024-11-21T23:10:52.591245Z node 4 :TX_DATASHARD TRACE: Filled readset for [1000004:403] from=9437186 to=9437184origin=9437186 2024-11-21T23:10:52.591310Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2024-11-21T23:10:52.591386Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:434:2384], Recipient [4:329:2302]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 
SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2024-11-21T23:10:52.591415Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:52.591439Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 403 2024-11-21T23:10:52.591876Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [4:227:2222], Recipient [4:227:2222]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:52.591915Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:52.591961Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2024-11-21T23:10:52.591998Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:10:52.592033Z node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437184 for LoadAndWaitInRS 2024-11-21T23:10:52.592076Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit LoadAndWaitInRS 2024-11-21T23:10:52.592106Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2024-11-21T23:10:52.592133Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit LoadAndWaitInRS 2024-11-21T23:10:52.592158Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit ExecuteDataTx 2024-11-21T23:10:52.592185Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit ExecuteDataTx 2024-11-21T23:10:52.595210Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437184 with status COMPLETE 2024-11-21T23:10:52.595271Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437184: {NSelectRow: 3, NSelectRange: 6, NUpdateRow: 5, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 226, SelectRangeBytes: 1808, UpdateRowBytes: 37, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2024-11-21T23:10:52.595324Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is ExecutedNoMoreRestarts 2024-11-21T23:10:52.595354Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit ExecuteDataTx 2024-11-21T23:10:52.595383Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompleteOperation 2024-11-21T23:10:52.595414Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompleteOperation 2024-11-21T23:10:52.595665Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is DelayComplete 2024-11-21T23:10:52.595695Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompleteOperation 2024-11-21T23:10:52.595720Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompletedOperations 2024-11-21T23:10:52.595749Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompletedOperations 2024-11-21T23:10:52.595786Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2024-11-21T23:10:52.595813Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2024-11-21T23:10:52.595841Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437184 has finished 2024-11-21T23:10:52.595877Z node 4 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:52.595904Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2024-11-21T23:10:52.595934Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2024-11-21T23:10:52.595966Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2024-11-21T23:10:52.615945Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2024-11-21T23:10:52.616012Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2024-11-21T23:10:52.616092Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:97:2132], exec latency: 3 ms, propose latency: 5 ms 2024-11-21T23:10:52.616159Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2024-11-21T23:10:52.616197Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2024-11-21T23:10:52.616736Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:329:2302], Recipient [4:227:2222]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2024-11-21T23:10:52.616778Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:52.616816Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 2024-11-21T23:10:52.638906Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2024-11-21T23:10:52.638977Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2024-11-21T23:10:52.639041Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:97:2132], exec latency: 3 ms, propose latency: 5 ms 2024-11-21T23:10:52.639106Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2024-11-21T23:10:52.639148Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2024-11-21T23:10:52.639501Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:227:2222], Recipient [4:434:2384]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2024-11-21T23:10:52.639543Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:10:52.639579Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> TxUsage::WriteToTopic_Demo_10 [GOOD] >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction |82.2%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh >> TxUsage::WriteToTopic_Demo_5 [GOOD] |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |82.2%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> TxUsage::WriteToTopic_Demo_18_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_6 |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TKeyValueTest::TestWriteLongKey [GOOD] >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:142:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:145:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:146:2057] recipient: [5:144:2166] Leader for TabletID 72057594037927937 is [5:147:2167] sender: [5:148:2057] recipient: [5:144:2166] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:147:2167] Leader for TabletID 72057594037927937 is [5:147:2167] sender: [5:217:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:147:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:149:2171] Leader for TabletID 72057594037927937 is [7:152:2172] sender: [7:153:2057] recipient: [7:149:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:152:2172] Leader for TabletID 72057594037927937 is [7:152:2172] sender: [7:222:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:148:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:151:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:152:2057] recipient: [8:150:2171] Leader for TabletID 72057594037927937 is [8:153:2172] sender: [8:154:2057] recipient: [8:150:2171] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:153:2172] Leader for TabletID 72057594037927937 is [8:153:2172] sender: [8:223:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TxUsage::WriteToTopic_Demo_11 >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartNo >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> TxUsage::WriteToTopic_Demo_26 [GOOD] >> 
TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> Describe::Basic [GOOD] >> Describe::Statistics >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |82.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey >> BSCRestartPDisk::RestartOneByOne [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:226:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:156:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:156:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:156:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:155:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:155:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:156:2176] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:160:2057] recipient: [10:156:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:159:2177] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:229:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:161:2057] recipient: [11:159:2179] Leader for TabletID 72057594037927937 is [11:162:2180] sender: [11:163:2057] recipient: [11:159:2179] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:162:2180] Leader for TabletID 72057594037927937 is [11:162:2180] sender: [11:215:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:161:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:163:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:165:2057] recipient: [12:164:2183] Leader for TabletID 72057594037927937 is [12:166:2184] sender: [12:167:2057] recipient: [12:164:2183] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! 
new actor is[12:166:2184] Leader for TabletID 72057594037927937 is [12:166:2184] sender: [12:219:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:166:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:169:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:170:2057] recipient: [13:168:2188] Leader for TabletID 72057594037927937 is [13:171:2189] sender: [13:172:2057] recipient: [13:168:2188] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:171:2189] Leader for TabletID 72057594037927937 is [13:171:2189] sender: [13:241:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:166:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:169:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:170:2057] recipient: [14:168:2188] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:172:2057] recipient: [14:168:2188] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:171:2189] Leader for TabletID 72057594037927937 is [14:171:2189] sender: [14:241:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:167:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:170:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:171:2057] recipient: [15:169:2188] Leader for TabletID 72057594037927937 is [15:172:2189] sender: [15:173:2057] recipient: [15:169:2188] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! 
new actor is[15:172:2189] Leader for TabletID 72057594037927937 is [15:172:2189] sender: [15:242:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] >> TxUsage::WriteToTopic_Demo_27 >> TYardTest::TestLogOverwriteRestarts [GOOD] >> TYardTest::TestMultiYardHarakiri ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 2043560934542252986 >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestIncrementalKeySet [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:56.803362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:56.803446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:56.803501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:56.803537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:56.803602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:56.803631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:56.803701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:56.804051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:56.893666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:56.893743Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:56.903991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:56.904430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:56.904686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:56.931187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:56.931562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:56.932265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:56.932959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:56.936738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.937666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:56.937708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:56.937759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:56.937789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:56.938337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:56.938668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:56.947535Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:57.072547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:57.072880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.073127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:57.073386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:57.073454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.079704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:57.079875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:57.080151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.080236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:57.080305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:57.080348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:57.082578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.082638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:57.082679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:57.084471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.084524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.084594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:57.084665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:57.088679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:57.090686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:57.090935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:57.091951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:57.092082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:57.092134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:57.092430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:57.092491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:57.092722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:57.093075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:57.095302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:57.095381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:57.095592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:57.095635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:57.095934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:57.095970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:57.096051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:57.096079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:57.096128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:57.096174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:57.096214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:57.096239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:57.096290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:57.096319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:57.096346Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Publication details: tx: 1, [Own ... 6 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2024-11-21T23:11:06.077079Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:11:06.077341Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 286us result status StatusSuccess 2024-11-21T23:11:06.078081Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 
16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:11:06.090655Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:11:06.090794Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:11:06.090880Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T23:11:06.090961Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2024-11-21T23:11:06.091119Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732230666058362 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732230666058362 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:11:06.091285Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 3 Group: 1732230666058362 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:11:06.096170Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1010:2783] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2024-11-21T23:11:06.097308Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T23:11:06.098580Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1011:2783] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T23:11:06.098695Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:932:2783] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: 2024-11-21T23:09:57.152762Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873765225999603:2060];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:57.187704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011d9/r3tmp/tmpQy7GOy/pdisk_1.dat 2024-11-21T23:09:58.143020Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:58.219898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:58.220012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:58.220308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:58.224878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11716, node 1 2024-11-21T23:09:58.794651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:09:58.794686Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:09:58.794693Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:09:58.794822Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7767 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:59.213903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.222977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:09:59.223057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.229506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:09:59.229747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:09:59.229763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2024-11-21T23:09:59.239457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:09:59.239492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:09:59.245431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:09:59.254887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:09:59.267034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230599309, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:09:59.267081Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:09:59.270752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:09:59.280996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:09:59.281203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:09:59.281261Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:09:59.281345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:09:59.281386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:09:59.281439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:09:59.295562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:09:59.295653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:09:59.295677Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:09:59.295787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2024-11-21T23:10:02.150795Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873765225999603:2060];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:02.150873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:10:13.142609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:10:13.142660Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:58.239001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2024-11-21T23:10:58.239418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2024-11-21T23:10:58.239472Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:143:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:143:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:143:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:147:2057] recipient: [12:145:2167] Leader for TabletID 72057594037927937 is [12:148:2168] sender: [12:149:2057] recipient: [12:145:2167] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:148:2168] Leader for TabletID 72057594037927937 is [12:148:2168] sender: [12:218:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:148:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:150:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:152:2057] recipient: [13:151:2172] Leader for TabletID 72057594037927937 is [13:153:2173] sender: [13:154:2057] recipient: [13:151:2172] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:153:2173] Leader for TabletID 72057594037927937 is [13:153:2173] sender: [13:223:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:148:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:150:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:152:2057] recipient: [14:151:2172] Leader for TabletID 72057594037927937 is [14:153:2173] sender: [14:154:2057] recipient: [14:151:2172] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! 
new actor is[14:153:2173] Leader for TabletID 72057594037927937 is [14:153:2173] sender: [14:223:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:151:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:154:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:155:2057] recipient: [15:153:2174] Leader for TabletID 72057594037927937 is [15:156:2175] sender: [15:157:2057] recipient: [15:153:2174] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:156:2175] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] 2024-11-21T23:11:07.321707Z node 17 :BS_PROXY_GET ERROR: [47ad982f08e135f5] Response# TEvGetResult {Status# BLOCKED ResponseSz# 1 {[72057594037927937:2:1:2:1:5:0] BLOCKED Size# 0 RequestedSize# 5} ErrorReason# "status# BLOCKED from# [0:1:0:0:0]"} Marker# BPG29 2024-11-21T23:11:07.321930Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 24 SentAt# 1970-01-01T00:00:00.024000Z GotAt# 24 ErrorReason# status# BLOCKED from# [0:1:0:0:0] 2024-11-21T23:11:07.328515Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2024-11-21T23:11:07.328600Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3, Details: Status# ALREADY From# [0:1:0:0:0] NodeId# 17 QuorumTracker# {Erroneous# 1 Successful# 0} Marker# TSYS31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 11858701457062465642 >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] |82.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2024-11-21T23:11:09.105379Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2024-11-21T23:11:09.108462Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2024-11-21T23:11:09.115660Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2024-11-21T23:11:09.115745Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2024-11-21T23:11:09.122727Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2024-11-21T23:11:09.122801Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2024-11-21T23:11:09.122848Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> TKeyValueTest::TestObtainLockNewApi >> LocalPartition::DiscoveryServiceBadPort [GOOD] >> LocalPartition::DiscoveryServiceBadNodeId >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TxUsage::WriteToTopic_Demo_35 [GOOD] >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TxUsage::Offsets_Cannot_Be_Promoted_When_Reading_In_A_Transaction [GOOD] >> TxUsage::WriteToTopic_Demo_36 >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TxUsage::ReadRuleGeneration >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> TableCreation::SimpleTableCreation >> TableCreation::MultipleTablesCreation >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable >> KqpSysColV0::InnerJoinSelect >> TxUsage::WriteToTopic_Demo_6 [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |82.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.3%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> DataShardSnapshots::LockedWritesLimitedPerKey [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit [GOOD] >> TxUsage::WriteToTopic_Demo_7 |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit [GOOD] Test command err: 2024-11-21T23:10:18.855302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:18.860945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:18.861130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019d9/r3tmp/tmpzdP7pT/pdisk_1.dat 2024-11-21T23:10:19.335623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:19.383443Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:19.443278Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:10:19.444314Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:10:19.444517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:19.444634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:19.457954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:19.602550Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:10:19.602627Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:10:19.602825Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T23:10:19.703918Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:10:19.704716Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:10:19.704835Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:10:19.705194Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:10:19.705393Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:10:19.705512Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T23:10:19.705860Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:10:19.707390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:19.708414Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:10:19.708498Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:10:19.742508Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:10:19.743472Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:10:19.743902Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:10:19.744193Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:10:19.802363Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:10:19.803113Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:10:19.803249Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:10:19.804917Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:10:19.805044Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:10:19.805110Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:10:19.805473Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:10:19.862263Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:10:19.862501Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:10:19.862630Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:10:19.862675Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:19.862715Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:10:19.862754Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:19.863178Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:19.863245Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:19.863769Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:10:19.863874Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:10:19.863994Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:19.864032Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:19.864132Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:10:19.864197Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:19.864257Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:19.864299Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:10:19.864352Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:10:19.864386Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:10:19.864423Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:10:19.864475Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:19.864600Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:10:19.864637Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:10:19.864731Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:19.864969Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:10:19.865044Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:10:19.865134Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:10:19.865193Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:10:19.865230Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:10:19.865262Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:10:19.865289Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:19.865540Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:10:19.865576Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:10:19.865621Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:10:19.865654Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:19.865707Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:10:19.865734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:10:19.865772Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:10:19.865802Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:10:19.865824Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:10:19.866617Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:19.866667Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:19.866694Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:19.866734Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:10:19.866798Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:10:19.869088Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:10:19.869139Z ... 8, tableId# 2, last full compaction# 1970-01-01T00:00:04.044379Z 2024-11-21T23:11:15.815977Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T23:11:15.816064Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T23:11:15.816137Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2024-11-21T23:11:15.816247Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T23:11:15.816332Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T23:11:15.816440Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T23:11:15.816474Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T23:11:15.816506Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2024-11-21T23:11:15.816578Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2024-11-21T23:11:15.816613Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T23:11:15.816689Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 
72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T23:11:15.816721Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T23:11:15.816750Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2024-11-21T23:11:15.816790Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T23:11:15.822738Z node 7 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [7:944:2745], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:11:15.823238Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T23:11:15.823294Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T23:11:15.823337Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2024-11-21T23:11:15.823406Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T23:11:15.823548Z node 7 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [7:944:2745], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1716 } } ComputeActorStats { } 2024-11-21T23:11:15.824374Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:15.824713Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 9427 } } ComputeActorStats { } 2024-11-21T23:11:15.825427Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:15.840095Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2024-11-21T23:11:15.840339Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:631:2536], Recipient [7:719:2599]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T23:11:15.840421Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:11:15.840500Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 
72075186224037888 txId 281474976715663 2024-11-21T23:11:15.841780Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2024-11-21T23:11:15.842056Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:719:2599], Recipient [7:631:2536]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T23:11:15.842150Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:11:15.842226Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2024-11-21T23:11:16.375082Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T23:11:16.375188Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715667 ProcessProposeKqpTransaction 2024-11-21T23:11:16.376131Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd8fwzc624wsxbkb0tant5xv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YWI0OGI4YjQtMTQ3MGEwYWUtYjE4MjVhZjMtMzQxZmZlNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2024-11-21T23:11:16.391409Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:1068:2865], Recipient [7:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T23:11:16.391562Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:11:16.391657Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2024-11-21T23:11:16.391730Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2024-11-21T23:11:16.391834Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T23:11:16.391982Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:11:16.392045Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:11:16.392310Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:11:16.392361Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:11:16.392411Z node 7 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T23:11:16.392475Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:11:16.392504Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:11:16.392527Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit 
ExecuteRead 2024-11-21T23:11:16.392552Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:11:16.392685Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T23:11:16.393097Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:1068:2865], 0} after executionsCount# 1 2024-11-21T23:11:16.393173Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1068:2865], 0} sends rowCount# 2, bytes# 72, quota rows left# 999, quota bytes left# 5242808, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:11:16.393280Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1068:2865], 0} finished in read 2024-11-21T23:11:16.393367Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:11:16.393393Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:11:16.393419Z node 7 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:11:16.393448Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:11:16.393494Z node 7 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:11:16.393518Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:11:16.393552Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T23:11:16.393613Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:11:16.393746Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T23:11:16.407131Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:1068:2865], Recipient [7:631:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T23:11:16.407225Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> TKeyValueTest::TestRenameWorksNewApi [GOOD] |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWritesLimitedPerKey [GOOD] Test command err: 2024-11-21T23:10:27.799204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:27.805164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:27.805349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019aa/r3tmp/tmpv1wiTQ/pdisk_1.dat 2024-11-21T23:10:28.235486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:28.288312Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:28.336903Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:10:28.338007Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:10:28.338265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:28.338454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:28.350132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:28.472071Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:10:28.472143Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:10:28.472365Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T23:10:28.534435Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:10:28.535196Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:10:28.535312Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:10:28.535681Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:10:28.535977Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:10:28.536083Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
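Editorial note (not part of the captured test output): the datashard trace above walks transaction 281474976715663 through a cross-shard commit, where each shard logs "Receive RS ..." for the incoming readset and later "Receive RS Ack ..." once the peer acknowledges it. As a hedged sketch only, here is one way such a log could be checked for unbalanced readsets; the regexes, the function name readset_balance, and the key layout are assumptions derived solely from the literal wording of the lines above, not from any YDB API.

# Editorial illustration: pair "Receive RS" records with their "Receive RS Ack"
# counterparts per (source, dest, txId). A non-zero balance would flag a
# readset that was delivered but never acknowledged in the captured trace.
import re
from collections import defaultdict

RS_RE = re.compile(r"Receive RS at \d+ source (\d+) dest (\d+) producer \d+ txId (\d+)")
ACK_RE = re.compile(r"Receive RS Ack at \d+ source (\d+) dest (\d+) consumer \d+ txId (\d+)")

def readset_balance(log_text: str) -> dict:
    """Return {(source, dest, txId): received - acked} for a datashard trace."""
    balance = defaultdict(int)
    for source, dest, tx in RS_RE.findall(log_text):
        balance[(source, dest, tx)] += 1
    for source, dest, tx in ACK_RE.findall(log_text):
        # The ack line repeats the original readset's source/dest fields,
        # so it cancels the matching "Receive RS" entry.
        balance[(source, dest, tx)] -= 1
    return dict(balance)

# Usage sketch: feeding the raw text of this trace should yield a balance of 0
# for both directions between 72075186224037888 and 72075186224037889.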
2024-11-21T23:10:28.536460Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:10:28.538294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:28.539989Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:10:28.540103Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:10:28.577532Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:10:28.578657Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:10:28.579130Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:10:28.579424Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:10:28.624288Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:10:28.625109Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:10:28.625229Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:10:28.627151Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:10:28.627248Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:10:28.627310Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:10:28.627776Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:10:28.649769Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:10:28.650004Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:10:28.650139Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:10:28.650183Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:28.650224Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:10:28.650261Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:28.650784Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:28.650869Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:28.651452Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:10:28.651577Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:10:28.651734Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:28.651792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:28.651840Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:10:28.651911Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:28.651983Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:28.652050Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:10:28.652094Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:10:28.652131Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:10:28.652189Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:10:28.652251Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:28.652394Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:10:28.652451Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:10:28.652552Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:28.652824Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:10:28.652884Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:10:28.653400Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:10:28.653493Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:10:28.653541Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:10:28.653579Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:10:28.653622Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:28.653947Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:10:28.653990Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:10:28.654027Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:10:28.654066Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:28.654161Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:10:28.654211Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:10:28.654262Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:10:28.654298Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:10:28.654379Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:10:28.655216Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:28.655272Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:28.655310Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:28.655365Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:10:28.655443Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:10:28.657908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:10:28.657965Z ... ng: false } RuntimeSettings { TimeoutMs: 300000 ExecType: DATA UseSpilling: false StatsMode: DQ_STATS_MODE_NONE } } TxBody: cleared Tasks TxBody: injected Locks 2024-11-21T23:11:17.195596Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [7:987:2792], Recipient [7:631:2536]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 987 RawX2: 30064773864 } TxBody: " \0018\000`\200\200\200\005jI\010\001\0329\n!\tY\001\000\000\000\000\000\000\021\000\000\001\000\000\020\000\001\030\001 \004)\000\001\205\000\000\000\000\0010\002\020\200\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T23:11:17.195696Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:11:17.195967Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [7:631:2536], Recipient [7:631:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:11:17.196012Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:11:17.196119Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:17.196431Z node 7 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 345, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T23:11:17.196546Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 345, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T23:11:17.196687Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CheckDataTx 2024-11-21T23:11:17.196777Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T23:11:17.196836Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T23:11:17.196889Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:11:17.196939Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 
on unit BuildAndWaitDependencies 2024-11-21T23:11:17.196998Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T23:11:17.197074Z node 7 :TX_DATASHARD TRACE: Activated operation [0:281474976715669] at 72075186224037888 2024-11-21T23:11:17.197129Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T23:11:17.197155Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:11:17.197179Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T23:11:17.197206Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T23:11:17.197263Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T23:11:17.197380Z node 7 :TX_DATASHARD TRACE: Operation [0:281474976715669] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193400 2024-11-21T23:11:17.197497Z node 7 :TX_DATASHARD TRACE: KqpCommitLock LockId: 345 DataShard: 72075186224037888 Generation: 1 Counter: 4 SchemeShard: 72057594046644480 PathId: 2 2024-11-21T23:11:17.197563Z node 7 :TX_DATASHARD TRACE: Committing changes lockId# 345 in localTid# 1001 shard# 72075186224037888 2024-11-21T23:11:17.197769Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T23:11:17.197840Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:11:17.197888Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T23:11:17.197937Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:11:17.197988Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T23:11:17.198025Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is DelayComplete 2024-11-21T23:11:17.198060Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:11:17.198112Z node 7 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:11:17.198159Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:11:17.198209Z node 7 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T23:11:17.198235Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:11:17.198272Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037888 has finished 2024-11-21T23:11:17.207498Z node 7 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:17.207608Z node 7 
:TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T23:11:17.207680Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T23:11:17.207823Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715669 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 818 } } ComputeActorStats { } 2024-11-21T23:11:17.667586Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] Handle TEvExecuteKqpTransaction 2024-11-21T23:11:17.667685Z node 7 :TX_PROXY DEBUG: actor# [7:52:2099] TxId# 281474976715670 ProcessProposeKqpTransaction 2024-11-21T23:11:17.668653Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd8fx0qr1kxtg7brympzmwdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YmUzMTZmZGItNTJhZTdlZGUtOTYxOTBkMWQtNzg5ZDkyOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2024-11-21T23:11:17.671104Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:1009:2817], Recipient [7:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2024-11-21T23:11:17.671255Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:11:17.671338Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2024-11-21T23:11:17.671398Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3000/18446744073709551615 2024-11-21T23:11:17.671491Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2024-11-21T23:11:17.671616Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T23:11:17.671674Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:11:17.671729Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:11:17.671776Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:11:17.671840Z node 7 :TX_DATASHARD TRACE: Activated operation [0:11] at 72075186224037888 2024-11-21T23:11:17.671894Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T23:11:17.671923Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:11:17.671964Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:11:17.671988Z 
node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:11:17.672129Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2024-11-21T23:11:17.672451Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:1009:2817], 0} after executionsCount# 1 2024-11-21T23:11:17.672532Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1009:2817], 0} sends rowCount# 3, bytes# 72, quota rows left# 998, quota bytes left# 5242808, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:11:17.672645Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:1009:2817], 0} finished in read 2024-11-21T23:11:17.672735Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T23:11:17.672767Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:11:17.672793Z node 7 :TX_DATASHARD TRACE: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:11:17.672826Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:11:17.672881Z node 7 :TX_DATASHARD TRACE: Execution status for [0:11] at 72075186224037888 is Executed 2024-11-21T23:11:17.672906Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:11:17.672945Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:11] at 72075186224037888 has finished 2024-11-21T23:11:17.673009Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:11:17.673147Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T23:11:17.674165Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:1009:2817], Recipient [7:631:2536]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2024-11-21T23:11:17.674244Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 23 } }, { items { uint32_value: 3 } items { uint32_value: 31 } } >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] 
recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:146:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:146:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! 
new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:150:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:152:2173] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:156:2057] recipient: [7:152:2173] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! 
new actor is[7:155:2174] Leader for TabletID 72057594037927937 is [7:155:2174] sender: [7:225:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:156:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:155:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:155:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:211:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:160:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:161:2057] recipient: [9:159:2180] Leader for TabletID 72057594037927937 is [9:162:2181] sender: [9:163:2057] recipient: [9:159:2180] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:162:2181] Leader for TabletID 72057594037927937 is [9:162:2181] sender: [9:215:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:162:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:165:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:166:2057] recipient: [10:164:2185] Leader for TabletID 72057594037927937 is [10:167:2186] sender: [10:168:2057] recipient: [10:164:2185] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:167:2186] Leader for TabletID 72057594037927937 is [10:167:2186] sender: [10:237:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:165:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:166:2057] recipient: [11:164:2185] Leader for TabletID 72057594037927937 is [11:167:2186] sender: [11:168:2057] recipient: [11:164:2185] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:167:2186] Leader for TabletID 72057594037927937 is [11:167:2186] sender: [11:237:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 720 ... 37 is [15:146:2167] sender: [15:216:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:141:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:144:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:145:2057] recipient: [16:143:2166] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:147:2057] recipient: [16:143:2166] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:146:2167] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:216:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:147:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:148:2057] recipient: [17:146:2168] Leader for TabletID 72057594037927937 is [17:149:2169] sender: [17:150:2057] recipient: [17:146:2168] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:149:2169] Leader for TabletID 72057594037927937 is [17:149:2169] sender: [17:219:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:149:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:152:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:153:2057] recipient: [18:151:2173] Leader for TabletID 72057594037927937 is [18:154:2174] sender: [18:155:2057] recipient: [18:151:2173] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:154:2174] Leader for TabletID 72057594037927937 is [18:154:2174] sender: [18:224:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:149:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:152:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:153:2057] recipient: [19:151:2173] Leader for TabletID 72057594037927937 is [19:154:2174] sender: [19:155:2057] recipient: [19:151:2173] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:154:2174] Leader for TabletID 72057594037927937 is [19:154:2174] sender: [19:224:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:153:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:154:2057] recipient: [20:152:2173] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:156:2057] recipient: [20:152:2173] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:155:2174] Leader for TabletID 72057594037927937 is [20:155:2174] sender: [20:226:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:153:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:156:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:157:2057] recipient: [21:155:2176] Leader for TabletID 72057594037927937 is [21:158:2177] sender: [21:159:2057] recipient: [21:155:2176] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:158:2177] Leader for TabletID 72057594037927937 is [21:158:2177] sender: [21:211:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:157:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:160:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:161:2057] recipient: [22:159:2180] Leader for TabletID 72057594037927937 is [22:162:2181] sender: [22:163:2057] recipient: [22:159:2180] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:162:2181] Leader for TabletID 72057594037927937 is [22:162:2181] sender: [22:215:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
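Editorial note (not produced by the test harness): the TKeyValueTest output above repeats a fixed pattern: inject a reboot of tablet 72057594037927937 on a chosen event, then expect "rebooted!", "tablet resolver refreshed!", and a new leader actor. As an assumption-laden sketch only, one way to tally those injections when scanning a log like this; the regexes and the function name summarize_reboots are editorial inventions based on the literal log lines, not part of YDB or its test framework.

# Editorial illustration: count reboot injections per triggering event and the
# number of reboots that reached "tablet resolver refreshed!".
import re
from collections import Counter

INJECT_RE = re.compile(r"!Reboot (\d+) \(actor \[[\d:]+\]\) on event (\S+) !")
DONE_RE = re.compile(r"!Reboot (\d+) \(actor \[[\d:]+\]\) tablet resolver refreshed!")

def summarize_reboots(log_text: str):
    """Return (Counter of injected events, count of completed reboots)."""
    injected = Counter(event for _tablet, event in INJECT_RE.findall(log_text))
    completed = len(DONE_RE.findall(log_text))
    return injected, completed

# Usage sketch: for a [GOOD] run like the one above, every injected event is
# expected to be matched by a completed reboot, so
# sum(injected.values()) == completed should hold over the full test log.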
Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:162:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:165:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:166:2057] recipient: [23:164:2185] Leader for TabletID 72057594037927937 is [23:167:2186] sender: [23:168:2057] recipient: [23:164:2185] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:167:2186] Leader for TabletID 72057594037927937 is [23:167:2186] sender: [23:237:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:162:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:165:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:166:2057] recipient: [24:164:2185] Leader for TabletID 72057594037927937 is [24:167:2186] sender: [24:168:2057] recipient: [24:164:2185] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:167:2186] Leader for TabletID 72057594037927937 is [24:167:2186] sender: [24:237:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:165:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:168:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:169:2057] recipient: [25:167:2187] Leader for TabletID 72057594037927937 is [25:170:2188] sender: [25:171:2057] recipient: [25:167:2187] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! 
new actor is[25:170:2188] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] >> TYardTest::TestMultiYardHarakiri [GOOD] >> TYardTest::TestLogOwerwrite >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:158:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:159:2057] recipient: [7:157:2178] Leader for TabletID 72057594037927937 is [7:160:2179] sender: [7:161:2057] recipient: [7:157:2178] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:160:2179] Leader for TabletID 72057594037927937 is [7:160:2179] sender: [7:230:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:143:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:143:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:142:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:144:2166] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:148:2057] recipient: [12:144:2166] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:147:2167] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:217:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:147:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:150:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:151:2057] recipient: [13:149:2171] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:153:2057] recipient: [13:149:2171] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:152:2172] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:222:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:147:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:150:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:151:2057] recipient: [14:149:2171] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:153:2057] recipient: [14:149:2171] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:152:2172] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:222:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:149:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:152:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:153:2057] recipient: [15:151:2172] Leader for TabletID 72057594037927937 is [15:154:2173] sender: [15:155:2057] recipient: [15:151:2172] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:154:2173] Leader for TabletID 72057594037927937 is [15:154:2173] sender: [15:224:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:154:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:157:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:158:2057] recipient: [16:156:2177] Leader for TabletID 72057594037927937 is [16:159:2178] sender: [16:160:2057] recipient: [16:156:2177] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! 
new actor is[16:159:2178] Leader for TabletID 72057594037927937 is [16:159:2178] sender: [16:229:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:154:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:156:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:158:2057] recipient: [17:157:2177] Leader for TabletID 72057594037927937 is [17:159:2178] sender: [17:160:2057] recipient: [17:157:2177] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:159:2178] Leader for TabletID 72057594037927937 is [17:159:2178] sender: [17:229:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:156:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:159:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:160:2057] recipient: [18:158:2178] Leader for TabletID 72057594037927937 is [18:161:2179] sender: [18:162:2057] recipient: [18:158:2178] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:161:2179] Leader for TabletID 72057594037927937 is [18:161:2179] sender: [18:231:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:161:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:164:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:165:2057] recipient: [19:163:2183] Leader for TabletID 72057594037927937 is [19:166:2184] sender: [19:167:2057] recipient: [19:163:2183] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! 
new actor is[19:166:2184] Leader for TabletID 72057594037927937 is [19:166:2184] sender: [19:236:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:161:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:164:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:165:2057] recipient: [20:163:2183] Leader for TabletID 72057594037927937 is [20:166:2184] sender: [20:167:2057] recipient: [20:163:2183] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:166:2184] Leader for TabletID 72057594037927937 is [20:166:2184] sender: [20:236:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:164:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:167:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:168:2057] recipient: [21:166:2185] Leader for TabletID 72057594037927937 is [21:169:2186] sender: [21:170:2057] recipient: [21:166:2185] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! 
new actor is[21:169:2186] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> TYardTest::TestLogOwerwrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
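Aside: the keyvalue unit-test traces above and below all follow the same restart-injection pattern. On each iteration the harness boots tablet 72057594037927937, replays the scenario, kills the leader actor when a particular event type is delivered (TEvTabletPipe::TEvServerConnected, TEvKeyValue::TEvRequest, TEvIntermediate, TEvExecuteTransaction, TEvReadRange, TEvNotify, ...), and then waits until the tablet resolver reports the new leader ("new actor is[...]") before letting the scenario continue. The sketch below is a minimal illustration of that driver loop only; it is hypothetical Python, not the NKikimr C++ test runtime, and every class and helper in it is a stand-in invented for the sketch.

# Illustrative sketch of the reboot-on-event pattern seen in the traces above.
# NOT the NKikimr test API: FakeTablet and run_scenario are hypothetical stand-ins.

REBOOT_EVENTS = [
    "NKikimr::TEvTabletPipe::TEvServerConnected",
    "NKikimr::TEvKeyValue::TEvRequest",
    "NKikimr::TEvKeyValue::TEvIntermediate",
    "NKikimr::TEvKeyValue::TEvExecuteTransaction",
    "NKikimr::TEvKeyValue::TEvReadRange",
    "NKikimr::TEvKeyValue::TEvNotify",
]

class FakeTablet:
    """Hypothetical stand-in for a tablet under test."""
    def __init__(self, tablet_id):
        self.tablet_id = tablet_id
        self.generation = 0
        self.leader = None

    def boot(self):
        # A restart produces a new leader actor; the id shape mimics the log,
        # the concrete numbers here are fake.
        self.generation += 1
        self.leader = f"[{self.generation}:105:2137]"
        return self.leader

def run_scenario(tablet, reboot_on, scenario_events):
    """Replay the scenario; inject a reboot when `reboot_on` is delivered."""
    for ev in scenario_events:
        if ev == reboot_on:
            print(f"!Reboot {tablet.tablet_id} (actor {tablet.leader}) on event {ev} !")
            new_leader = tablet.boot()
            print(f"!Reboot {tablet.tablet_id} rebooted! tablet resolver refreshed!")
            print(f"new actor is{new_leader}")
        # other events are handled normally and the scenario continues

if __name__ == "__main__":
    scenario = ["NKikimr::TEvKeyValue::TEvRequest",
                "NKikimr::TEvKeyValue::TEvIntermediate",
                "NKikimr::TEvKeyValue::TEvNotify"]
    for reboot_event in REBOOT_EVENTS:
        tablet = FakeTablet(72057594037927937)
        tablet.boot()
        run_scenario(tablet, reboot_event, scenario)

Each pass reboots at a different point in the request pipeline, which is why the traces repeat the same scenario once per event type with a fresh leader actor id every time.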
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:143:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:143:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:142:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:144:2166] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:146:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:148:2057] recipient: [12:144:2166] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:147:2167] Leader for TabletID 72057594037927937 is [12:147:2167] sender: [12:217:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:147:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:150:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:151:2057] recipient: [13:149:2171] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:153:2057] recipient: [13:149:2171] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:152:2172] Leader for TabletID 72057594037927937 is [13:152:2172] sender: [13:222:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:147:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:150:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:151:2057] recipient: [14:149:2171] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:153:2057] recipient: [14:149:2171] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:152:2172] Leader for TabletID 72057594037927937 is [14:152:2172] sender: [14:222:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:148:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:151:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:152:2057] recipient: [15:150:2171] Leader for TabletID 72057594037927937 is [15:153:2172] sender: [15:154:2057] recipient: [15:150:2171] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:153:2172] Leader for TabletID 72057594037927937 is [15:153:2172] sender: [15:223:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:153:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:156:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:157:2057] recipient: [16:155:2176] Leader for TabletID 72057594037927937 is [16:158:2177] sender: [16:159:2057] recipient: [16:155:2176] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! 
new actor is[16:158:2177] Leader for TabletID 72057594037927937 is [16:158:2177] sender: [16:228:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:153:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:156:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:157:2057] recipient: [17:155:2176] Leader for TabletID 72057594037927937 is [17:158:2177] sender: [17:159:2057] recipient: [17:155:2176] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:158:2177] Leader for TabletID 72057594037927937 is [17:158:2177] sender: [17:228:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:154:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:156:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:158:2057] recipient: [18:157:2176] Leader for TabletID 72057594037927937 is [18:159:2177] sender: [18:160:2057] recipient: [18:157:2176] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! 
new actor is[18:159:2177] Leader for TabletID 72057594037927937 is [18:159:2177] sender: [18:229:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] >> TTxLocatorTest::TestAllocateAllByPieces >> KqpScanArrowFormat::AggregateCountStar >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:149:2057] recipient: [4:147:2169] Leader for TabletID 72057594037927937 is [4:150:2170] sender: [4:151:2057] recipient: [4:147:2169] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:150:2170] Leader for TabletID 72057594037927937 is [4:150:2170] sender: [4:220:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:150:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:150:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:141:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:145:2057] recipient: [10:143:2166] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:147:2057] recipient: [10:143:2166] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:146:2167] Leader for TabletID 72057594037927937 is [10:146:2167] sender: [10:216:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:141:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:144:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:145:2057] recipient: [11:143:2166] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:147:2057] recipient: [11:143:2166] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:146:2167] Leader for TabletID 72057594037927937 is [11:146:2167] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:145:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:148:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:149:2057] recipient: [12:147:2169] Leader for TabletID 72057594037927937 is [12:150:2170] sender: [12:151:2057] recipient: [12:147:2169] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! 
new actor is[12:150:2170] Leader for TabletID 72057594037927937 is [12:150:2170] sender: [12:220:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] >> TPersQueueTest::InflightLimit [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::CreateOldTable >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] >> KqpScanArrowFormat::SingleKey >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2024-11-21T23:11:23.734522Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:11:23.735057Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:11:23.735811Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:11:23.737589Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.738084Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:11:23.748750Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.748864Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.748921Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.749005Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:11:23.749202Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.749318Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:11:23.749464Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:11:23.750185Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#8796093022207 2024-11-21T23:11:23.750851Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.750915Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.751016Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2024-11-21T23:11:23.751048Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2024-11-21T23:11:23.755366Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#8796093022207 2024-11-21T23:11:23.756000Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.756089Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.756209Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2024-11-21T23:11:23.756248Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2024-11-21T23:11:23.756596Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#8796093022207 2024-11-21T23:11:23.756897Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.756931Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.756995Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2024-11-21T23:11:23.757033Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2024-11-21T23:11:23.757326Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#8796093022207 2024-11-21T23:11:23.757574Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.757616Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.757677Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2024-11-21T23:11:23.757708Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected 
SUCCESS 2024-11-21T23:11:23.758043Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#8796093022207 2024-11-21T23:11:23.758447Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.758524Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.758678Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2024-11-21T23:11:23.758721Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2024-11-21T23:11:23.759185Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2123] requested range size#8796093022207 2024-11-21T23:11:23.759566Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.759661Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.759767Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2024-11-21T23:11:23.759813Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:89:2123] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2024-11-21T23:11:23.760272Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:93:2127] requested range size#8796093022207 2024-11-21T23:11:23.760640Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.760712Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.760794Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2024-11-21T23:11:23.760825Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:93:2127] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2024-11-21T23:11:23.761310Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:97:2131] requested range size#8796093022207 2024-11-21T23:11:23.761638Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.761717Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.761823Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2024-11-21T23:11:23.761882Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:97:2131] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 
2024-11-21T23:11:23.762347Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:101:2135] requested range size#8796093022207 2024-11-21T23:11:23.762642Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.762701Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.762773Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2024-11-21T23:11:23.762814Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:101:2135] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2024-11-21T23:11:23.763296Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:105:2139] requested range size#8796093022207 2024-11-21T23:11:23.763648Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.763722Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.763796Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2024-11-21T23:11:23.763828Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:105:2139] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2024-11-21T23:11:23.764339Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#8796093022207 2024-11-21T23:11:23.764692Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.764782Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.764859Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2024-11-21T23:11:23.764893Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:109:2143] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2024-11-21T23:11:23.765346Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#8796093022207 2024-11-21T23:11:23.765669Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.765775Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... 
e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:149:2183] requested range size#8796093022207 2024-11-21T23:11:23.776815Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.776883Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.776992Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2024-11-21T23:11:23.777039Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:149:2183] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2024-11-21T23:11:23.777677Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:153:2187] requested range size#8796093022207 2024-11-21T23:11:23.778029Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.778133Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.778223Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2024-11-21T23:11:23.778266Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:153:2187] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2024-11-21T23:11:23.778953Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:157:2191] requested range size#8796093022207 2024-11-21T23:11:23.779303Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.779438Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.779523Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2024-11-21T23:11:23.779563Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:157:2191] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2024-11-21T23:11:23.780306Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:161:2195] requested range size#8796093022207 2024-11-21T23:11:23.780688Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.780790Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.780877Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2024-11-21T23:11:23.780917Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:161:2195] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 
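Further down, the same trace explains the one failure it ends with: the last full-size reservation stops at 281474976710624, which is exactly 32 × 8796093022207; the follow-up size-31 request fills the space up to 281474976710655 (2^48 − 1); and the final size-1 request has nothing left to reserve, so TTxReserve completes with Successed# 0 and the sender receives IMPOSIBLE. The arithmetic is spelled out below, with the values copied from the log and the 2^48 − 1 ceiling treated as an observation about this trace rather than a documented constant.

```python
# Arithmetic behind the tail of this allocator trace; constants copied from the log.
RANGE = 8796093022207                      # each big request, == 2**43 - 1
assert RANGE == 2**43 - 1

after_big = 32 * RANGE                     # where the last full-size reservation ends
assert after_big == 281474976710624        # log: "Reserved to# 281474976710624"

after_small = after_big + 31               # the size-31 request that follows
assert after_small == 281474976710655 == 2**48 - 1   # log: "Reserved to# 281474976710655"

# Nothing remains for the final size-1 request, hence "Successed# 0 ... IMPOSIBLE".
assert after_small + 1 > 2**48 - 1
```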
2024-11-21T23:11:23.781711Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:165:2199] requested range size#8796093022207 2024-11-21T23:11:23.782037Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.782089Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.782172Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2024-11-21T23:11:23.782195Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:165:2199] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2024-11-21T23:11:23.782753Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:169:2203] requested range size#8796093022207 2024-11-21T23:11:23.783099Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.783184Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.783292Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2024-11-21T23:11:23.783330Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:169:2203] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2024-11-21T23:11:23.784121Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:173:2207] requested range size#8796093022207 2024-11-21T23:11:23.784406Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.784447Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.784507Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2024-11-21T23:11:23.784530Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:173:2207] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2024-11-21T23:11:23.784994Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:177:2211] requested range size#8796093022207 2024-11-21T23:11:23.785301Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.785394Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.785487Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2024-11-21T23:11:23.785526Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:177:2211] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected 
SUCCESS 2024-11-21T23:11:23.786367Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:181:2215] requested range size#8796093022207 2024-11-21T23:11:23.786762Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.786813Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.786902Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2024-11-21T23:11:23.786938Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:181:2215] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2024-11-21T23:11:23.787695Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:185:2219] requested range size#8796093022207 2024-11-21T23:11:23.788020Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.788110Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.788196Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2024-11-21T23:11:23.788248Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:185:2219] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2024-11-21T23:11:23.788958Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:189:2223] requested range size#8796093022207 2024-11-21T23:11:23.789226Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.789267Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.789318Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2024-11-21T23:11:23.789361Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:189:2223] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2024-11-21T23:11:23.790113Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:193:2227] requested range size#8796093022207 2024-11-21T23:11:23.790479Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.790574Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.790657Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2024-11-21T23:11:23.790706Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:193:2227] TEvAllocateResult from# 272678883688417 to# 281474976710624 
expected SUCCESS 2024-11-21T23:11:23.791499Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:197:2231] requested range size#31 2024-11-21T23:11:23.791761Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.791804Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:11:23.791858Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2024-11-21T23:11:23.791892Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:197:2231] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2024-11-21T23:11:23.792470Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:201:2235] requested range size#1 2024-11-21T23:11:23.792589Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2024-11-21T23:11:23.792620Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:201:2235] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestLogOwerwrite [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable [GOOD] >> TxUsage::WriteToTopic_Demo_36 [GOOD] >> KqpScanArrowFormat::AllTypesColumns >> KqpScanArrowInChanels::AggregateNoColumn >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> KqpScanArrowInChanels::AllTypesColumns >> TxUsage::WriteToTopic_Demo_37 >> KqpSysColV0::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2024-11-21T23:04:48.542138Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872437692470785:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:48.554426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:04:48.791249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439872438830939545:2240];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:49.141918Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:04:49.141198Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001aad/r3tmp/tmpP4V0Th/pdisk_1.dat 2024-11-21T23:04:49.890197Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:04:49.958167Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:52.558544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:52.582515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:53.038624Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:53.043787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:53.043893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:53.055177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:53.401138Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.243856s 2024-11-21T23:04:53.401462Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.244429s 2024-11-21T23:04:53.485676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:53.485755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18204, node 1 2024-11-21T23:04:53.519456Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:04:53.574522Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872437692470785:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:53.594922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:04:53.602302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:04:53.751980Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439872438830939545:2240];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:53.752078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:04:53.836792Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001aad/r3tmp/yandex5eZlm0.tmp 2024-11-21T23:04:53.836825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001aad/r3tmp/yandex5eZlm0.tmp 2024-11-21T23:04:53.856945Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001aad/r3tmp/yandex5eZlm0.tmp 2024-11-21T23:04:53.857148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:04:53.964200Z INFO: TTestServer started on Port 6492 GrpcPort 18204 TClient is connected to server localhost:6492 PQClient connected to localhost:18204 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:04:56.160788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:04:57.054831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:04:57.855569Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:04:57.940972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:05:08.037381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:05:08.037672Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:05:12.790634Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439872541910154866:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:12.791429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:12.792801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439872541910154893:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:05:12.880767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:05:13.152308Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439872541910154895:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:05:13.416205Z node 1 :KQP_PROXY ERROR: TraceId: "01jd8fhmb30bf9m2j0kfwgzfrp", Request deadline has expired for 4.517772s seconds 2024-11-21T23:05:13.949509Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439872546205122227:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:05:13.951667Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODAwN2VhMDItOWMwMTIxZTQtNDM2YjhhMGMtMWU1M2Y0NWE=, ActorId: [2:7439872541910154864:2301], ActorState: ExecuteState, TraceId: 01jd8fhwsf20g03rr1d97s4dyz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:05:13.954864Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439872545066654588:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:05:13.956775Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjhlMDYzMTktZTdiMmVmY2MtYjFiZmQyYWUtNGRjZWQwODc=, ActorId: [1:7439872545066654544:2328], ActorState: ExecuteState, TraceId: 01jd8fhxe2703pqew99a258rq7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:05:13.965637Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:05:13.978169Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:05:14.021348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:05:14.565228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:05:15.306691Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439872549361622125:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because i ... 230663438 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1732230663521 CreateTimestampMS: 1732230663506 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1732230663554 CreateTimestampMS: 1732230663553 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3127 } Cookie: 0 } 2024-11-21T23:11:11.289994Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_8538363639053512659_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T23:11:11.290068Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_8538363639053512659_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 9ffd624-b556d0f7-3c29c129-94f64096 has messages 1 2024-11-21T23:11:11.290200Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_8538363639053512659_v1 read done: guid# 9ffd624-b556d0f7-3c29c129-94f64096, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2024-11-21T23:11:11.290237Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_8538363639053512659_v1 response to read: guid# 9ffd624-b556d0f7-3c29c129-94f64096 2024-11-21T23:11:11.299792Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_8538363639053512659_v1 Process answer. 
Aval parts: 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2024-11-21T23:11:11.310836Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_8538363639053512659_v1 grpc read done: success# 0, data# { } 2024-11-21T23:11:11.310871Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_8538363639053512659_v1 grpc read failed 2024-11-21T23:11:11.310895Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_8538363639053512659_v1 grpc closed 2024-11-21T23:11:11.310929Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_8538363639053512659_v1 is DEAD 2024-11-21T23:11:11.312235Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:11:11.312281Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_2_8538363639053512659_v1 2024-11-21T23:11:11.312341Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7439874069596281789:2554] destroyed 2024-11-21T23:11:11.312403Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_2_8538363639053512659_v1 2024-11-21T23:11:13.470492Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] TPersQueueReadBalancer::HandleWakeup 2024-11-21T23:11:13.502586Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2024-11-21T23:11:13.506955Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:11:13.526894Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2024-11-21T23:11:13.650240Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 1048584 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10240 BurstSize: 10240 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2024-11-21T23:11:13.734977Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2024-11-21T23:11:13.763302Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] ProcessPendingStats. PendingUpdates size 1 2024-11-21T23:11:15.298469Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T23:11:15.298546Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T23:11:15.298682Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 
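In the TPersQueueTest::InflightLimit output around this point, the field worth watching is WaitQuotaTimeMs: the cookie-2 session's CmdReadResult above reports WaitQuotaTimeMs: 3127, and the cookie-3 session's result just below reports WaitQuotaTimeMs: 7122, consistent with later sessions queueing behind the inflight limit before their reads are served. A throwaway way to pull those values out of a saved log such as this one is sketched below; the script name, file path and regex are illustrative, not part of the test suite.

```python
# Illustrative helper, not part of YDB: collect WaitQuotaTimeMs values from a saved log.
import re
import sys

PATTERN = re.compile(r"WaitQuotaTimeMs: (\d+)")

def wait_quota_times(path):
    with open(path, encoding="utf-8", errors="replace") as f:
        return [int(m.group(1)) for m in PATTERN.finditer(f.read())]

if __name__ == "__main__":
    # e.g. python wait_quota.py ya_test_output.log  ->  [..., 3127, 7122, ...]
    print(wait_quota_times(sys.argv[1]))
```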
2024-11-21T23:11:15.298715Z node 27 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T23:11:15.298928Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:11:15.299612Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_13398870673090997775_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1732230663414 CreateTimestampMS: 1732230663414 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1732230663455 CreateTimestampMS: 1732230663438 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1732230663521 CreateTimestampMS: 1732230663506 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1732230663554 CreateTimestampMS: 1732230663553 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7122 } Cookie: 0 } 2024-11-21T23:11:15.300127Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_13398870673090997775_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T23:11:15.300204Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_13398870673090997775_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 92306645-2afdb099-c143a086-f1d0547b has messages 1 2024-11-21T23:11:15.300364Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_13398870673090997775_v1 read done: guid# 92306645-2afdb099-c143a086-f1d0547b, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82616 2024-11-21T23:11:15.300400Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_13398870673090997775_v1 response to read: guid# 92306645-2afdb099-c143a086-f1d0547b 2024-11-21T23:11:15.300862Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_13398870673090997775_v1 Process answer. 
Aval parts: 0 Bytes readed: 82616 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2024-11-21T23:11:15.347811Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_13398870673090997775_v1 grpc read done: success# 0, data# { } 2024-11-21T23:11:15.347854Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_13398870673090997775_v1 grpc read failed 2024-11-21T23:11:15.347888Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_13398870673090997775_v1 grpc closed 2024-11-21T23:11:15.347928Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_13398870673090997775_v1 is DEAD 2024-11-21T23:11:15.350619Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:11:15.350663Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_3_13398870673090997775_v1 2024-11-21T23:11:15.350717Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7439874069596281792:2556] destroyed 2024-11-21T23:11:15.350780Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_3_13398870673090997775_v1 2024-11-21T23:11:16.098524Z node 27 :KQP_COMPUTE WARN: SelfId: [27:7439874103956020544:2612], TxId: 281474976710708, task: 1, CA Id [27:7439874103956020542:2612]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2024-11-21T23:11:16.139476Z node 27 :KQP_COMPUTE WARN: SelfId: [27:7439874103956020544:2612], TxId: 281474976710708, task: 1, CA Id [27:7439874103956020542:2612]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:11:16.174615Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:11:16.214960Z node 27 :KQP_COMPUTE WARN: SelfId: [27:7439874103956020544:2612], TxId: 281474976710708, task: 1, CA Id [27:7439874103956020542:2612]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:11:16.386333Z node 27 :KQP_COMPUTE WARN: SelfId: [27:7439874103956020544:2612], TxId: 281474976710708, task: 1, CA Id [27:7439874103956020542:2612]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:11:16.479544Z node 27 :KQP_COMPUTE WARN: SelfId: [27:7439874103956020544:2612], TxId: 281474976710708, task: 1, CA Id [27:7439874103956020542:2612]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:11:16.685423Z node 27 :KQP_COMPUTE WARN: SelfId: [27:7439874103956020544:2612], TxId: 281474976710708, task: 1, CA Id [27:7439874103956020542:2612]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:11:17.046576Z node 27 :KQP_COMPUTE WARN: SelfId: [27:7439874103956020544:2612], TxId: 281474976710708, task: 1, CA Id [27:7439874103956020542:2612]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:11:17.265506Z node 27 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710709. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:11:17.278919Z node 27 :KQP_EXECUTER WARN: ActorId: [27:7439874108250987913:2613] TxId: 281474976710709. Ctx: { TraceId: 01jd8fwzpa2v8mn7wjhr983xbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=NDhhYTJiZGMtODRhZTk3MWEtZjE5NmQ0ZmEtNjNiMjU5Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:11:17.332123Z node 27 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=27&id=NDhhYTJiZGMtODRhZTk3MWEtZjE5NmQ0ZmEtNjNiMjU5Nzc=, ActorId: [27:7439874103956020561:2613], ActorState: ExecuteState, TraceId: 01jd8fwzpa2v8mn7wjhr983xbz, Create QueryResponse for error on request, msg: 2024-11-21T23:11:17.347234Z node 27 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fx0gpb7gx044wea8seb2h" } } YdbStatus: UNAVAILABLE ConsumedRu: 590 } 2024-11-21T23:11:17.563328Z node 27 :KQP_COMPUTE WARN: SelfId: [27:7439874103956020544:2612], TxId: 281474976710708, task: 1, CA Id [27:7439874103956020542:2612]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:11:18.508548Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2024-11-21T23:10:31.089861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873910558224275:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:31.089915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:31.446911Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:10:31.454946Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001f3e/r3tmp/tmpfa6Yuf/pdisk_1.dat 2024-11-21T23:10:31.649533Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:31.956681Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:31.980781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:31.980873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:31.990855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:31.990900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:32.007274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:32.013294Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:10:32.014274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26456, node 1 2024-11-21T23:10:32.222999Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001f3e/r3tmp/yandexWRhbYP.tmp 2024-11-21T23:10:32.223022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001f3e/r3tmp/yandexWRhbYP.tmp 2024-11-21T23:10:32.223164Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001f3e/r3tmp/yandexWRhbYP.tmp 2024-11-21T23:10:32.223250Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:32.276561Z INFO: TTestServer started on Port 19990 GrpcPort 26456 TClient is connected to server localhost:19990 PQClient connected to localhost:26456 === TenantModeEnabled() = 0 === Init PQ - start server on port 26456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:32.800250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:10:32.800447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.800678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T23:10:32.800910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:32.800934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.805088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:32.805229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:10:32.805397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.805449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation 
type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:10:32.805466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T23:10:32.805478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 2024-11-21T23:10:32.809400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.809445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:10:32.809457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:10:32.811685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.811718Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.811758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:10:32.811795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 waiting... 2024-11-21T23:10:32.816408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:10:32.818469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:10:32.818495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T23:10:32.818521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:10:32.818631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2024-11-21T23:10:32.818752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:10:32.821446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230632867, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:32.821607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7439873910558224669 RawX2: 4294969650 } } Step: 1732230632867 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T23:10:32.821637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:10:32.821891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:10:32.821925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at 
tablet 72057594046644480 2024-11-21T23:10:32.822077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:10:32.822116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:10:32.827812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:32.827844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:10:32.828015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:32.828034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439873910558224688:2370], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T23:10:32.828071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.828091Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:10:32.828179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:10:32.828201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T23:10:32.828222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2024-11-21T23:10:32.828244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T23:10:32.828259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:10:32.828272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T23:10:32.828332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T23:10:32.828348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2024-11-21T23:10:32.828360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2024-11-21T23:10:32.844167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, ... register reading session ReadingSession "shared/cli_5_1_17431870698674409831_v1" (Sender=[5:7439874129630400393:2627], Pipe=[5:7439874129630400396:2627], Partitions=[], ActiveFamilyCount=0) 2024-11-21T23:11:22.496129Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli rebalancing was scheduled 2024-11-21T23:11:22.496214Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing. 
Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T23:11:22.496293Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_17431870698674409831_v1" (Sender=[5:7439874129630400393:2627], Pipe=[5:7439874129630400396:2627], Partitions=[], ActiveFamilyCount=0) 2024-11-21T23:11:22.496385Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_17431870698674409831_v1" sender [5:7439874129630400393:2627] lock partition 0 for ReadingSession "shared/cli_5_1_17431870698674409831_v1" (Sender=[5:7439874129630400393:2627], Pipe=[5:7439874129630400396:2627], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2024-11-21T23:11:22.496473Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T23:11:22.496512Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000267s 2024-11-21T23:11:22.498540Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_17431870698674409831_v1" ClientId: "cli" PipeClient { RawX1: 7439874129630400396 RawX2: 4503621102209603 } Path: "/Root/PQ/rt3.dc1--topic1" } 2024-11-21T23:11:22.498677Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T23:11:22.502763Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1 2024-11-21T23:11:22.503062Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_17431870698674409831_v1:1 with generation 1 2024-11-21T23:11:22.513309Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1732230682473 } Cookie: 18446744073709551615 } 2024-11-21T23:11:22.513381Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2024-11-21T23:11:22.513461Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 sending to client partition status 2024-11-21T23:11:22.514767Z :INFO: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (empty maybe) 2024-11-21T23:11:22.515228Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2024-11-21T23:11:22.515316Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T23:11:22.515358Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T23:11:22.515383Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2024-11-21T23:11:22.515438Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2024-11-21T23:11:22.515453Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1TEvPartitionReady. Aval parts: 1 2024-11-21T23:11:22.515488Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 performing read request: guid# e67b18bd-2bcf30fa-83f4f0e5-f39f5f15, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T23:11:22.515600Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid e67b18bd-2bcf30fa-83f4f0e5-f39f5f15 2024-11-21T23:11:22.517021Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1732230682314 CreateTimestampMS: 1732230682304 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1732230682379 CreateTimestampMS: 1732230682305 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1732230682404 CreateTimestampMS: 1732230682305 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T23:11:22.517258Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2024-11-21T23:11:22.517300Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid e67b18bd-2bcf30fa-83f4f0e5-f39f5f15 has messages 1 2024-11-21T23:11:22.517394Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 read done: guid# e67b18bd-2bcf30fa-83f4f0e5-f39f5f15, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2024-11-21T23:11:22.517430Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 response to read: guid# e67b18bd-2bcf30fa-83f4f0e5-f39f5f15 2024-11-21T23:11:22.517630Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 Process answer. Aval parts: 0 2024-11-21T23:11:22.518366Z :DEBUG: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2024-11-21T23:11:22.518540Z :DEBUG: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2024-11-21T23:11:22.518776Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2024-11-21T23:11:22.518828Z :DEBUG: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] Returning serverBytesSize = 490 to budget 2024-11-21T23:11:22.518866Z :DEBUG: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2024-11-21T23:11:22.519102Z :DEBUG: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T23:11:22.519261Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T23:11:22.519311Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T23:11:22.519345Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2024-11-21T23:11:22.519404Z :DEBUG: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2024-11-21T23:11:22.519456Z :DEBUG: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:11:22.519695Z :INFO: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] Closing read session. 
Close timeout: 0.000000s 2024-11-21T23:11:22.519741Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2024-11-21T23:11:22.519788Z :INFO: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] Counters: { Errors: 0 CurrentSessionLifetimeMs: 81 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:11:22.519928Z :NOTICE: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:11:22.519971Z :DEBUG: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] [] Abort session to cluster 2024-11-21T23:11:22.520447Z :NOTICE: [] [] [9fdd7414-7c7a0cb0-8ecd9988-c24737ad] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:11:22.523738Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2024-11-21T23:11:22.523785Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 grpc closed 2024-11-21T23:11:22.523824Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_17431870698674409831_v1 is DEAD 2024-11-21T23:11:22.525805Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7439874129630400396:2627] disconnected; active server actors: 1 2024-11-21T23:11:22.525844Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7439874129630400396:2627] client cli disconnected session shared/cli_5_1_17431870698674409831_v1 2024-11-21T23:11:22.526152Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_17431870698674409831_v1 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2024-11-21T23:10:34.307045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873926164979907:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:34.307098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:34.505284Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873922825502639:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:34.505334Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:34.795988Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:10:34.826964Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001ebc/r3tmp/tmptoeByo/pdisk_1.dat 2024-11-21T23:10:35.371154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:35.387461Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:35.430942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:35.431068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:35.431889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:35.431979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:35.434257Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:10:35.434854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:35.435519Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4749, node 1 2024-11-21T23:10:35.662931Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001ebc/r3tmp/yandexWLHxE5.tmp 2024-11-21T23:10:35.662958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001ebc/r3tmp/yandexWLHxE5.tmp 2024-11-21T23:10:35.663080Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001ebc/r3tmp/yandexWLHxE5.tmp 2024-11-21T23:10:35.663171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:35.738139Z INFO: TTestServer started on Port 22815 GrpcPort 4749 TClient is connected to server localhost:22815 PQClient connected to localhost:4749 === TenantModeEnabled() = 0 === Init PQ - start server on port 4749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:10:36.407832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:10:36.408051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:36.408223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T23:10:36.408423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:10:36.408461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:36.410408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:36.410525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:10:36.410654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:36.410691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:10:36.410703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2024-11-21T23:10:36.410713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 
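The FLAT_TX_SCHEMESHARD trace above advances operation 281474976715657 through numbered internal states (2 -> 3, then 3 -> 128, and later 128 -> 240). The helper below is a minimal, hypothetical sketch — not part of YDB or of this test harness — that extracts those "Change state for txid A:B X -> Y" transitions from a captured log with std::regex, which makes long scheme-operation traces like this one easier to skim.

// Hypothetical log-skimming helper (not YDB code): prints the txid state
// transitions found in lines such as
//   "... INFO: Change state for txid 281474976715657:0 2 -> 3"
#include <iostream>
#include <regex>
#include <string>

int main() {
    // Matches "Change state for txid <txid>:<part> <from> -> <to>".
    const std::regex re(R"(Change state for txid (\d+):(\d+) (\d+) -> (\d+))");
    std::string line;
    while (std::getline(std::cin, line)) {
        std::smatch m;
        if (std::regex_search(line, m, re)) {
            std::cout << "txid " << m[1] << " part " << m[2]
                      << ": " << m[3] << " -> " << m[4] << '\n';
        }
    }
}

Piping this test log through the helper prints one line per transition, for example "txid 281474976715657 part 0: 2 -> 3".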
2024-11-21T23:10:36.412503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:36.412536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:10:36.412549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2024-11-21T23:10:36.414059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:36.414091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:36.414108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:10:36.414132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2024-11-21T23:10:36.418481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:10:36.418800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:10:36.418819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2024-11-21T23:10:36.418832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:10:36.420238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2024-11-21T23:10:36.420343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:10:36.422988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230636465, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:10:36.423120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 7439873930459947823 RawX2: 4294969659 } } Step: 1732230636465 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T23:10:36.423149Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:10:36.423358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2024-11-21T23:10:36.423388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet 72057594046644480 2024-11-21T23:10:36.423544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:10:36.423578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:10:36.426194Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:10:36.426216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:10:36.426372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:10:36.426413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439873930459947841:2378], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2024-11-21T23:10:36.426448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:36.426466Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:10:36.426541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2024-11-21T23:10:36.426553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T23:10:36.426579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2024-11-21T23:10:36.426596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2024-11-21T23:10:36.426608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:10:36.426624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T23:10:36.426661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72 ... Direct read cache: registered server session: shared/cli_5_1_10931703335058888058_v1:1 with generation 1 2024-11-21T23:11:22.348050Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1732230682202 } Cookie: 18446744073709551615 } 2024-11-21T23:11:22.348117Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2024-11-21T23:11:22.348270Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 sending to client partition status 2024-11-21T23:11:22.351339Z :INFO: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (empty maybe) 2024-11-21T23:11:22.351944Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2024-11-21T23:11:22.352071Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T23:11:22.352117Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T23:11:22.352157Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2024-11-21T23:11:22.352222Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2024-11-21T23:11:22.352239Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1TEvPartitionReady. Aval parts: 1 2024-11-21T23:11:22.352282Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 performing read request: guid# ac086862-922a5f8a-a791f072-c8536a3c, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T23:11:22.352361Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid ac086862-922a5f8a-a791f072-c8536a3c 2024-11-21T23:11:22.354515Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1732230681994 CreateTimestampMS: 1732230681978 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1732230682004 CreateTimestampMS: 1732230681978 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1732230682026 CreateTimestampMS: 1732230681978 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T23:11:22.354723Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2024-11-21T23:11:22.354763Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid ac086862-922a5f8a-a791f072-c8536a3c has messages 1 2024-11-21T23:11:22.354886Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 read done: guid# ac086862-922a5f8a-a791f072-c8536a3c, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 488 2024-11-21T23:11:22.354923Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 response to read: guid# ac086862-922a5f8a-a791f072-c8536a3c 2024-11-21T23:11:22.355155Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 Process answer. Aval parts: 0 2024-11-21T23:11:22.356164Z :DEBUG: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] Got ReadResponse, serverBytesSize = 488, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428312 2024-11-21T23:11:22.356302Z :DEBUG: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428312 2024-11-21T23:11:22.356559Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2024-11-21T23:11:22.356607Z :DEBUG: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] Returning serverBytesSize = 488 to budget 2024-11-21T23:11:22.356647Z :DEBUG: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] In ContinueReadingDataImpl, ReadSizeBudget = 488, ReadSizeServerDelta = 52428312 2024-11-21T23:11:22.356860Z :DEBUG: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T23:11:22.357015Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T23:11:22.357057Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2024-11-21T23:11:22.357089Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2024-11-21T23:11:22.357144Z :DEBUG: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2024-11-21T23:11:22.357193Z :DEBUG: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:11:22.357384Z :INFO: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] Closing read session. 
Close timeout: 0.000000s 2024-11-21T23:11:22.357429Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2024-11-21T23:11:22.357476Z :INFO: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] Counters: { Errors: 0 CurrentSessionLifetimeMs: 146 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:11:22.357592Z :NOTICE: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:11:22.357632Z :DEBUG: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] [] Abort session to cluster 2024-11-21T23:11:22.358102Z :NOTICE: [] [] [f83d1fd8-f9c27984-d78fc228-e9fddc32] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:11:22.361560Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 grpc read done: success# 1, data# { read_request { bytes_size: 488 } } 2024-11-21T23:11:22.361612Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 grpc closed 2024-11-21T23:11:22.361650Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_10931703335058888058_v1 is DEAD 2024-11-21T23:11:22.362624Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7439874129095070883:2603] disconnected; active server actors: 1 2024-11-21T23:11:22.362664Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7439874129095070883:2603] client cli disconnected session shared/cli_5_1_10931703335058888058_v1 2024-11-21T23:11:22.363618Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_10931703335058888058_v1 2024-11-21T23:11:22.729705Z node 5 :KQP_EXECUTER ERROR: ActorId: [5:7439874129095070900:2594] TxId: 281474976710706. Ctx: { TraceId: 01jd8fx5da6r379qed01y9phba, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTdjMGMxYTctOWQ3MDNhMmUtNDAxYThkNTQtNmNjYTA0ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 6 2024-11-21T23:11:22.755313Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7439874129095070909:2608], TxId: 281474976710706, task: 2. Ctx: { TraceId : 01jd8fx5da6r379qed01y9phba. SessionId : ydb://session/3?node_id=5&id=MTdjMGMxYTctOWQ3MDNhMmUtNDAxYThkNTQtNmNjYTA0ZDU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [5:7439874129095070900:2594], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:11:22.755776Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7439874129095070910:2609], TxId: 281474976710706, task: 4. Ctx: { TraceId : 01jd8fx5da6r379qed01y9phba. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=5&id=MTdjMGMxYTctOWQ3MDNhMmUtNDAxYThkNTQtNmNjYTA0ZDU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [5:7439874129095070900:2594], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:11:23.414995Z node 5 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710707. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:11:23.415166Z node 5 :KQP_EXECUTER WARN: ActorId: [5:7439874133390038235:2612] TxId: 281474976710707. Ctx: { TraceId: 01jd8fx6k374nq8gzhzs9nh2yp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDZhYzA5OTgtMzA4YjkyMGUtZTY2NTJjNTUtOThkZjkwNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:11:23.415606Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NDZhYzA5OTgtMzA4YjkyMGUtZTY2NTJjNTUtOThkZjkwNTQ=, ActorId: [5:7439874133390038232:2612], ActorState: ExecuteState, TraceId: 01jd8fx6k374nq8gzhzs9nh2yp, Create QueryResponse for error on request, msg: 2024-11-21T23:11:23.416929Z node 5 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fx6k374nq8gzhzw5qy3n0" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable [GOOD] Test command err: 2024-11-21T23:10:27.615250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:27.621884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:27.622107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019bc/r3tmp/tmpGuK39H/pdisk_1.dat 2024-11-21T23:10:28.027932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:28.074319Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:28.125505Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:10:28.126725Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:10:28.126990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:28.127160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:28.138937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:28.267860Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:10:28.267933Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:10:28.268076Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T23:10:28.392399Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:10:28.393131Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:10:28.393249Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:10:28.393612Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:10:28.393827Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:10:28.393949Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
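The TX_PROXY/schemeshard exchange above proposes an ESchemeOpCreateTable for "table-1" with two Uint32 columns and "key" as the key column. For comparison, the fragment below sketches how an equivalent table could be created through the YDB C++ SDK's TTableBuilder; it is an assumption-laden illustration, not code from this test — the endpoint, database path and include path are placeholders, and exact SDK headers and method spellings may differ between SDK versions.

// Hedged sketch: creating a table shaped like the "table-1" proposed above
// via the YDB C++ SDK (header path and connection parameters are assumptions).
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

int main() {
    TDriver driver(TDriverConfig()
                       .SetEndpoint("grpc://localhost:2136")  // placeholder endpoint
                       .SetDatabase("/Root"));                // placeholder database
    TTableClient client(driver);
    auto session = client.CreateSession().GetValueSync().GetSession();

    auto desc = TTableBuilder()
                    .AddNullableColumn("key", EPrimitiveType::Uint32)
                    .AddNullableColumn("value", EPrimitiveType::Uint32)
                    .SetPrimaryKeyColumn("key")
                    .Build();

    auto status = session.CreateTable("/Root/table-1", std::move(desc)).GetValueSync();
    // The test's proposal also sets UniformPartitionsCount: 1; partitioning
    // settings are omitted here to keep the sketch minimal.
    driver.Stop(true);
    return status.IsSuccess() ? 0 : 1;
}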
2024-11-21T23:10:28.394281Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:10:28.395898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:28.397068Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:10:28.397176Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:10:28.434507Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:10:28.435762Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:10:28.436312Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:10:28.436659Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:10:28.493815Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:10:28.494568Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:10:28.494725Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:10:28.496530Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:10:28.496624Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:10:28.496681Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:10:28.497034Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:10:28.543932Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:10:28.544151Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:10:28.544289Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:10:28.544331Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:28.544373Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:10:28.544414Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:28.544882Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:28.545012Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:28.545584Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:10:28.545694Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:10:28.545860Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:28.545905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:28.545953Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:10:28.546017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:28.546074Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:28.546126Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:10:28.546187Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:10:28.546227Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:10:28.546268Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:10:28.546333Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:28.546515Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:10:28.546583Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:10:28.546706Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:28.547001Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:10:28.547062Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:10:28.547169Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:10:28.547230Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:10:28.547307Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:10:28.547350Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:10:28.547405Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:28.547723Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:10:28.547773Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:10:28.547818Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:10:28.547886Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:28.547950Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:10:28.547985Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:10:28.548021Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:10:28.548137Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:10:28.548187Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:10:28.549665Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:10:28.549721Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:10:28.560613Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:28.560715Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:28.560754Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:28.560827Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... 23:11:23.972136Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:11:23.972335Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:924:2742], Recipient [8:924:2742]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:11:23.972388Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:11:23.972496Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:23.972781Z node 8 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T23:11:23.972890Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T23:11:23.973040Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2024-11-21T23:11:23.973197Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T23:11:23.973266Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T23:11:23.973328Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:11:23.973380Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:11:23.973453Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v1000/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v400/18446744073709551615 2024-11-21T23:11:23.973554Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2024-11-21T23:11:23.973614Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T23:11:23.973643Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit 
BuildAndWaitDependencies 2024-11-21T23:11:23.973668Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T23:11:23.973695Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T23:11:23.973762Z node 8 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715661 not found 2024-11-21T23:11:23.973820Z node 8 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715661 expected 1:0 found 0:0 2024-11-21T23:11:23.973955Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2024-11-21T23:11:23.974049Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T23:11:23.974078Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T23:11:23.974104Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:11:23.974130Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T23:11:23.974180Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2024-11-21T23:11:23.974221Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:11:23.974303Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:11:23.974351Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:11:23.974425Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2024-11-21T23:11:23.974468Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:11:23.974502Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2024-11-21T23:11:23.974591Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:23.974643Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2024-11-21T23:11:23.974726Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2024-11-21T23:11:23.974824Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:23.975426Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=MjRjOTZkNzItZWM3OTNkYjctMWUzY2VhMTItNDk3ZDg0ZTE=, ActorId: [8:802:2649], ActorState: ExecuteState, TraceId: 01jd8fx76z9ngxxy6rkg1tmfmp, Create QueryResponse for error on request, msg: 2024-11-21T23:11:23.976367Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8fx76z9ngxxy6rkg1tmfmp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MjRjOTZkNzItZWM3OTNkYjctMWUzY2VhMTItNDk3ZDg0ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:11:23.976806Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [8:952:2649], Recipient [8:924:2742]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 952 RawX2: 34359741017 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2024-11-21T23:11:23.976849Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:11:23.976958Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:924:2742], Recipient [8:924:2742]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:11:23.976986Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:11:23.977060Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:23.977266Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T23:11:23.977340Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T23:11:23.977378Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:11:23.977415Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T23:11:23.977442Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:11:23.977464Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:11:23.977499Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v1000/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2024-11-21T23:11:23.977543Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T23:11:23.977571Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:11:23.977628Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:11:23.977657Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T23:11:23.977701Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T23:11:23.977772Z node 8 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193454 2024-11-21T23:11:23.977898Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2024-11-21T23:11:23.977983Z node 8 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T23:11:23.978047Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 
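In the TX_DATASHARD trace, each proposed KQP data transaction is pushed through a fixed chain of execution units — CheckDataTx, BuildAndWaitDependencies, ExecuteKqpDataTx, FinishPropose, CompletedOperations — and advances while a unit reports Executed, with FinishPropose returning DelayComplete and being finished later in TTxProposeTransactionBase::Complete. The snippet below is only an illustrative model of that pipeline pattern, with unit names taken from the trace; it is not YDB's execution-unit implementation.

// Illustrative model of the unit pipeline seen in the TX_DATASHARD trace;
// the unit names come from the log, the code itself is hypothetical.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete };  // statuses that appear in the trace

struct TUnit {
    std::string Name;
    std::function<EStatus()> Run;
};

int main() {
    // Unit order observed for an immediate KQP data tx in the log above.
    const std::vector<TUnit> plan = {
        {"CheckDataTx",              [] { return EStatus::Executed; }},
        {"BuildAndWaitDependencies", [] { return EStatus::Executed; }},
        {"ExecuteKqpDataTx",         [] { return EStatus::Executed; }},
        {"FinishPropose",            [] { return EStatus::DelayComplete; }},
        {"CompletedOperations",      [] { return EStatus::Executed; }},
    };

    for (const auto& unit : plan) {
        const EStatus st = unit.Run();
        std::cout << "unit " << unit.Name << " -> "
                  << (st == EStatus::Executed ? "Executed" : "DelayComplete") << '\n';
        // In the trace, units that return DelayComplete (FinishPropose here)
        // are completed later, in TTxProposeTransactionBase::Complete, while
        // the plan itself keeps advancing to the next unit.
    }
    std::cout << "execution plan has finished\n";
}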
2024-11-21T23:11:23.978072Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T23:11:23.978096Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:11:23.978117Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T23:11:23.978158Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T23:11:23.978238Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2024-11-21T23:11:23.978263Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:11:23.978286Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:11:23.978311Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:11:23.978359Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:11:23.978673Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:11:23.978728Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T23:11:23.978788Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:23.978821Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T23:11:23.978869Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:24.360031Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [8:969:2775], Recipient [8:924:2742]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:11:24.360158Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:11:24.360235Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:968:2774], serverId# [8:969:2775], sessionId# [0:0:0] 2024-11-21T23:11:24.360433Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [8:557:2484], Recipient [8:924:2742]: NKikimr::TEvDataShard::TEvGetOpenTxs |82.4%| [TA] $(B)/ydb/core/blobstorage/pdisk/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 3785, MsgBus: 64377 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00230f/r3tmp/tmpXt3MFz/pdisk_1.dat 2024-11-21T23:11:16.154122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874104950289546:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:16.711489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:11:17.237362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:17.237464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:17.258761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:17.328231Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3785, node 1 2024-11-21T23:11:17.495023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:17.495047Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:17.495068Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:17.495163Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64377 TClient is connected to server localhost:64377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:18.352787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:18.384180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:11:18.718814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:19.285108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:19.503214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:11:21.106721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874104950289546:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:21.106785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:23.055946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874135015062186:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:23.056081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:23.317375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:23.345073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:23.375611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:23.418225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:23.451306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:23.498375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:23.561698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874135015062690:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:23.561808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:23.562537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874135015062695:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:23.566653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:23.585317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874135015062697:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |82.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |82.4%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort >> TOlap::StoreStats >> TableCreation::CreateOldTable [GOOD] |82.4%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> 
TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards >> KqpRm::DisonnectNodes >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> TxUsage::ReadRuleGeneration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] Test command err: 2024-11-21T23:10:20.750693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:20.753942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:20.754123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019cd/r3tmp/tmpQll6yc/pdisk_1.dat 2024-11-21T23:10:21.222687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:21.261136Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:21.314068Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:10:21.315082Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:10:21.315274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:21.315394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:21.327782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:21.444333Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:10:21.444407Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:10:21.444546Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T23:10:21.568946Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:10:21.569542Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:10:21.569653Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:10:21.570019Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:10:21.570178Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:10:21.570268Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T23:10:21.576116Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:10:21.577517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:21.578689Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:10:21.578758Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:10:21.610224Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:10:21.611842Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:10:21.612282Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:10:21.612600Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:10:21.661316Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:10:21.662035Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:10:21.662133Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:10:21.663693Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:10:21.663774Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:10:21.663868Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:10:21.664251Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:10:21.701710Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:10:21.701911Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:10:21.702030Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:10:21.702062Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:21.702099Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:10:21.702131Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:21.702651Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:21.702751Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:21.703388Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:10:21.703486Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:10:21.703596Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:21.703620Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:21.703712Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:10:21.703758Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:21.703795Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:21.703832Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:10:21.703858Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:10:21.703884Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:10:21.703912Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:10:21.703952Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:21.704039Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:10:21.704079Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:10:21.704147Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:21.704392Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:10:21.704439Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:10:21.704515Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:10:21.704567Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:10:21.704605Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:10:21.704634Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:10:21.704658Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:21.704859Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:10:21.704887Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:10:21.704919Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:10:21.704945Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:21.704984Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:10:21.705002Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:10:21.705027Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:10:21.705045Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:10:21.705062Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:10:21.705607Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:21.705649Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:21.705679Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:21.705707Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:10:21.705750Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:10:21.707648Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:10:21.707700Z ... T23:11:24.361802Z node 8 :KQP_EXECUTER DEBUG: ActorId: [8:1939:3214] TxId: 281474976715669. Ctx: { TraceId: 01jd8fx7f40ymzee2t10fdvmpt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=N2RhOWMwMjgtMWNkZjg3ZGItNzI2N2U3YzEtNTc5YzkxNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002264s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 2024-11-21T23:11:24.361998Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2RhOWMwMjgtMWNkZjg3ZGItNzI2N2U3YzEtNTc5YzkxNDE=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd8fx7f40ymzee2t10fdvmpt, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2024-11-21T23:11:24.362221Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2RhOWMwMjgtMWNkZjg3ZGItNzI2N2U3YzEtNTc5YzkxNDE=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd8fx7f40ymzee2t10fdvmpt, txInfo Status: Committed Kind: ReadOnly TotalDuration: 22.723 ServerDuration: 22.613 QueriesCount: 2 2024-11-21T23:11:24.362379Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2RhOWMwMjgtMWNkZjg3ZGItNzI2N2U3YzEtNTc5YzkxNDE=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd8fx7f40ymzee2t10fdvmpt, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:11:24.362939Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=N2RhOWMwMjgtMWNkZjg3ZGItNzI2N2U3YzEtNTc5YzkxNDE=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd8fx7f40ymzee2t10fdvmpt, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:11:24.363032Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2RhOWMwMjgtMWNkZjg3ZGItNzI2N2U3YzEtNTc5YzkxNDE=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd8fx7f40ymzee2t10fdvmpt, EndCleanup, isFinal: 0 2024-11-21T23:11:24.363142Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=N2RhOWMwMjgtMWNkZjg3ZGItNzI2N2U3YzEtNTc5YzkxNDE=, ActorId: [8:1929:3214], ActorState: ExecuteState, TraceId: 01jd8fx7f40ymzee2t10fdvmpt, Sent query response back to proxy, proxyRequestId: 12, proxyId: [8:162:2148] { items { uint32_value: 1 } items { uint32_value: 10 } } 
2024-11-21T23:11:24.477302Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [8:1994:3235], Recipient [8:1996:3236]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:11:24.482731Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [8:1994:3235], Recipient [8:1996:3236]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:11:24.482884Z node 8 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [8:1994:3235], Recipient [8:1996:3236]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:11:24.492183Z node 8 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [8:1996:3236] 2024-11-21T23:11:24.492541Z node 8 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:11:24.499629Z node 8 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:11:24.501223Z node 8 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:11:24.503758Z node 8 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:11:24.503857Z node 8 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:11:24.503965Z node 8 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:11:24.504405Z node 8 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:11:24.504481Z node 8 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2024-11-21T23:11:24.504620Z node 8 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 0 siblings to be activated: wait to activation from: 2024-11-21T23:11:24.504669Z node 8 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:11:24.504840Z node 8 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [8:2028:3261] 2024-11-21T23:11:24.504890Z node 8 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:11:24.504954Z node 8 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2024-11-21T23:11:24.505013Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:24.505093Z node 8 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2024-11-21T23:11:24.505818Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [8:1996:3236], Recipient [8:1996:3236]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:11:24.505900Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:11:24.506191Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvRegisterTablet TabletId# 72075186224037888 ProcessingParams { Version: 1 PlanResolution: 100 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 }} 2024-11-21T23:11:24.506346Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72075186224037888 2024-11-21T23:11:24.506464Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] SEND to Sender# [8:1996:3236] {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 1000} 
2024-11-21T23:11:24.507017Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2024-11-21T23:11:24.507250Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 276168711, Sender [8:2029:3262], Recipient [8:1996:3236]: NKikimr::NDataShard::TEvChangeExchange::TEvSplitAck 2024-11-21T23:11:24.507847Z node 8 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:24.507937Z node 8 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2024-11-21T23:11:24.508020Z node 8 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:24.509289Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [8:25:2072], Recipient [8:1996:3236]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 1000} 2024-11-21T23:11:24.509354Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2024-11-21T23:11:24.509428Z node 8 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2024-11-21T23:11:24.509499Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:24.510842Z node 8 :TX_DATASHARD NOTICE: TTxChangeExchangeSplitAck Execute, at tablet# 72075186224037888 2024-11-21T23:11:24.510910Z node 8 :TX_DATASHARD NOTICE: TTxChangeExchangeSplitAck Complete, at tablet# 72075186224037888 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2024-11-21T23:11:24.517051Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 1000 NextAcquireStep: 1000 2024-11-21T23:11:24.517359Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [8:25:2072], Recipient [8:1996:3236]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 1000 NextReadStep# 1000 ReadStep# 1000 } 2024-11-21T23:11:24.517425Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2024-11-21T23:11:24.517503Z node 8 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1000 next step 1000 2024-11-21T23:11:24.640324Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 1100 2024-11-21T23:11:24.640669Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 1100 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 8 bucket 0 2024-11-21T23:11:24.641190Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1100} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 9 bucket 0 ... blocked step 1100 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 1100 at node 9 bucket 1 ... 
blocked step 1100 at node 9 bucket 1 2024-11-21T23:11:25.439365Z node 8 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 3000 in 1.000000s at 2.950000s 2024-11-21T23:11:25.442466Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 2000 2024-11-21T23:11:25.442690Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 2000 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 8 bucket 0 2024-11-21T23:11:25.443198Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 9 bucket 0 ... blocked step 2000 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 2000 at node 9 bucket 1 ... blocked step 2000 at node 9 bucket 1 2024-11-21T23:11:26.025771Z node 8 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 1.000000s at 3.950000s 2024-11-21T23:11:26.031984Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 3000 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2024-11-21T23:11:26.032450Z node 9 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [9:51:2063] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 3 NextAcquireStep: 3000 ... observed step 3000 at node 8 bucket 0 2024-11-21T23:11:26.032590Z node 8 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [8:25:2072] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 9 bucket 0 ... blocked step 3000 at node 9 bucket 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... observed step 3000 at node 9 bucket 1 ... blocked step 3000 at node 9 bucket 1 ... 
upsert finished before unblocking node 2 >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TableCreation::SimpleUpdateTable [GOOD] >> LocalPartition::DiscoveryServiceBadNodeId [GOOD] >> LocalPartition::WithoutPartition >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2024-11-21T23:11:14.576546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874094545406290:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:14.576615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000fc9/r3tmp/tmpa4VwIf/pdisk_1.dat 2024-11-21T23:11:15.438874Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:15.447138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:15.447238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:15.450326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62235 TServer::EnableGrpc on GrpcPort 29251, node 1 2024-11-21T23:11:16.099167Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:16.099194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:16.099201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:16.099324Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2024-11-21T23:11:16.530701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:16.576059Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:11:19.578709Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874094545406290:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:19.603664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:20.595752Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T23:11:20.597118Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T23:11:20.618019Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2024-11-21T23:11:20.618042Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2024-11-21T23:11:20.618084Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2024-11-21T23:11:20.618244Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2024-11-21T23:11:20.618249Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2024-11-21T23:11:20.618267Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2024-11-21T23:11:20.618374Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2024-11-21T23:11:20.618378Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2024-11-21T23:11:20.618409Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2024-11-21T23:11:20.623334Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:11:20.623453Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:11:20.623561Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T23:11:20.623590Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2024-11-21T23:11:20.623627Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T23:11:20.635593Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:11:20.642642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2024-11-21T23:11:20.645619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:11:20.647382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:11:20.674321Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2024-11-21T23:11:20.674992Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2024-11-21T23:11:20.675072Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2024-11-21T23:11:20.675158Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2024-11-21T23:11:20.675171Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710660 2024-11-21T23:11:20.690497Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2024-11-21T23:11:21.031712Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2024-11-21T23:11:21.091126Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2024-11-21T23:11:21.125962Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2024-11-21T23:11:21.133181Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2024-11-21T23:11:21.191852Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2024-11-21T23:11:21.191975Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2024-11-21T23:11:21.193067Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 8f749fe8-b089d767-74c6981f-c88b714c, Bootstrap. 
Database: /dc-1 2024-11-21T23:11:21.193596Z node 1 :KQP_PROXY DEBUG: Request has 18445011843028.358039s seconds to be completed 2024-11-21T23:11:21.196594Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=YjViNjk5YzgtYzBkZTMxOWYtY2M2M2ZmYzYtMmU2NDc5NTQ=, workerId: [1:7439874124610178038:2308], database: /dc-1, longSession: 1, local sessions count: 1 2024-11-21T23:11:21.196715Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T23:11:21.275200Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 8f749fe8-b089d767-74c6981f-c88b714c, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2024-11-21T23:11:21.278445Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YjViNjk5YzgtYzBkZTMxOWYtY2M2M2ZmYzYtMmU2NDc5NTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7439874124610178038:2308] 2024-11-21T23:11:21.278488Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7439874124610178040:2469] 2024-11-21T23:11:21.287184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874124610178041:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:21.287256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874124610178053:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:21.287300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:21.293844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2024-11-21T23:11:21.338642Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:11:21.339521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874124610178056:2314], DatabaseId: /dc-1, P ... _executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2024-11-21T23:11:27.414628Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NWM2ZGFkNWUtYzU1Nzg2MDQtZTMwNzA3ZTItNTcxMmU3ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [2:7439874152239316982:2304] 2024-11-21T23:11:27.414673Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7439874152239316984:2457] 2024-11-21T23:11:27.415798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874152239316993:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:27.415858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874152239316985:2306], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:27.416151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:27.418680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2024-11-21T23:11:27.429546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874152239316999:2310], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:11:27.728967Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:7439874152239316983:2305], selfId: [2:7439874135059446954:2063], source: [2:7439874152239316982:2304] 2024-11-21T23:11:27.729172Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: cd7da280-1450cdfd-b45acf74-be843214, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWM2ZGFkNWUtYzU1Nzg2MDQtZTMwNzA3ZTItNTcxMmU3ZWE=, TxId: 2024-11-21T23:11:27.729198Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: cd7da280-1450cdfd-b45acf74-be843214, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWM2ZGFkNWUtYzU1Nzg2MDQtZTMwNzA3ZTItNTcxMmU3ZWE=, TxId: 2024-11-21T23:11:27.729210Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: cd7da280-1450cdfd-b45acf74-be843214. Result: SUCCESS. Issues: 2024-11-21T23:11:27.732074Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MmUxZWI4NGYtYmFjM2U3MS02ZWRjZDlmMi02MmZlYWVmNQ==, workerId: [2:7439874152239317092:2320], database: dc-1, longSession: 1, local sessions count: 2 2024-11-21T23:11:27.732214Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T23:11:27.732561Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NWM2ZGFkNWUtYzU1Nzg2MDQtZTMwNzA3ZTItNTcxMmU3ZWE=, workerId: [2:7439874152239316982:2304], local sessions count: 1 2024-11-21T23:11:27.732822Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MmUxZWI4NGYtYmFjM2U3MS02ZWRjZDlmMi02MmZlYWVmNQ==, CurrentExecutionId: cd7da280-1450cdfd-b45acf74-be843214, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7439874152239317092:2320] 2024-11-21T23:11:27.732853Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7439874152239317094:2523] 2024-11-21T23:11:27.770983Z node 2 :KQP_PROXY DEBUG: TraceId: "01jd8fxb0t7aekm4xqgccgyw03", Request has 18445011843021.780665s seconds to be completed 2024-11-21T23:11:27.772829Z node 2 :KQP_PROXY DEBUG: TraceId: "01jd8fxb0t7aekm4xqgccgyw03", Created new session, sessionId: ydb://session/3?node_id=2&id=YTZmYTJmZTYtMWM1ODFkZWEtMmM5YTAyMjEtNzc0NDJjYzY=, workerId: [2:7439874152239317108:2330], database: /dc-1, longSession: 1, local sessions count: 2 2024-11-21T23:11:27.772943Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jd8fxb0t7aekm4xqgccgyw03 2024-11-21T23:11:27.775379Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2024-11-21T23:11:27.775400Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2024-11-21T23:11:27.775428Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2024-11-21T23:11:27.777961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:1, at schemeshard: 72057594046644480 2024-11-21T23:11:27.779578Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2024-11-21T23:11:27.779635Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976710664 2024-11-21T23:11:27.787228Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: cd7da280-1450cdfd-b45acf74-be843214, Bootstrap. Database: /dc-1 2024-11-21T23:11:27.788156Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7439874152239316979:2455], selfId: [2:7439874135059446954:2063], source: [2:7439874152239317092:2320] 2024-11-21T23:11:27.788287Z node 2 :KQP_PROXY DEBUG: Request has 18445011843021.763341s seconds to be completed 2024-11-21T23:11:27.790221Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZDgyZDg5Y2MtMzllNTdlZjQtYmMyMTkxMWEtY2E0MzY2YTU=, workerId: [2:7439874152239317171:2333], database: /dc-1, longSession: 1, local sessions count: 3 2024-11-21T23:11:27.790351Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T23:11:27.790834Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: cd7da280-1450cdfd-b45acf74-be843214, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2024-11-21T23:11:27.791174Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZDgyZDg5Y2MtMzllNTdlZjQtYmMyMTkxMWEtY2E0MzY2YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7439874152239317171:2333] 2024-11-21T23:11:27.791217Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7439874152239317174:2562] 2024-11-21T23:11:27.852062Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976710664. Doublechecking... 2024-11-21T23:11:27.933933Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T23:11:27.935136Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2024-11-21T23:11:27.966471Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YTZmYTJmZTYtMWM1ODFkZWEtMmM5YTAyMjEtNzc0NDJjYzY=, workerId: [2:7439874152239317108:2330], local sessions count: 2 2024-11-21T23:11:28.015307Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7439874152239317172:2334], selfId: [2:7439874135059446954:2063], source: [2:7439874152239317171:2333] 2024-11-21T23:11:28.015719Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: cd7da280-1450cdfd-b45acf74-be843214, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDgyZDg5Y2MtMzllNTdlZjQtYmMyMTkxMWEtY2E0MzY2YTU=, TxId: 2024-11-21T23:11:28.015735Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: cd7da280-1450cdfd-b45acf74-be843214, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDgyZDg5Y2MtMzllNTdlZjQtYmMyMTkxMWEtY2E0MzY2YTU=, TxId: 2024-11-21T23:11:28.015900Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: cd7da280-1450cdfd-b45acf74-be843214, start saving rows range [0; 1) 2024-11-21T23:11:28.015981Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: cd7da280-1450cdfd-b45acf74-be843214, Bootstrap. Database: /dc-1 2024-11-21T23:11:28.016136Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZDgyZDg5Y2MtMzllNTdlZjQtYmMyMTkxMWEtY2E0MzY2YTU=, workerId: [2:7439874152239317171:2333], local sessions count: 1 2024-11-21T23:11:28.016196Z node 2 :KQP_PROXY DEBUG: Request has 18445011843021.535431s seconds to be completed 2024-11-21T23:11:28.017960Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OWUwNDVhMGEtZTI2N2IyZGYtNTI4MTVjMjctZmNlZmUwZTU=, workerId: [2:7439874156534284540:2347], database: /dc-1, longSession: 1, local sessions count: 2 2024-11-21T23:11:28.018094Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T23:11:28.018501Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: cd7da280-1450cdfd-b45acf74-be843214, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2024-11-21T23:11:28.018888Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OWUwNDVhMGEtZTI2N2IyZGYtNTI4MTVjMjctZmNlZmUwZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 10, targetId: [2:7439874156534284540:2347] 2024-11-21T23:11:28.018922Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7439874156534284542:2609] >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AggregateByColumn |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> Describe::Statistics [GOOD] >> Describe::Location >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> KqpRm::NotEnoughExecutionUnits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2024-11-21T23:11:14.599278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874094358704327:2073];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:14.617050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000fc7/r3tmp/tmpIAhEh6/pdisk_1.dat 2024-11-21T23:11:15.587111Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:15.587543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:15.587635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:15.606124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18027 TServer::EnableGrpc on GrpcPort 19890, node 1 2024-11-21T23:11:16.581286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:16.581318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:16.581327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:16.581436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2024-11-21T23:11:16.929050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:16.959236Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:11:19.581678Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874094358704327:2073];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:19.581770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:20.099597Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T23:11:20.106753Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T23:11:20.132052Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2024-11-21T23:11:20.132077Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2024-11-21T23:11:20.132113Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2024-11-21T23:11:20.132258Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2024-11-21T23:11:20.132266Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2024-11-21T23:11:20.132281Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2024-11-21T23:11:20.150194Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2024-11-21T23:11:20.155324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2024-11-21T23:11:20.157317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:11:20.158598Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2024-11-21T23:11:20.158604Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2024-11-21T23:11:20.158636Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. 
Full table path:/dc-1/.metadata/script_executions 2024-11-21T23:11:20.166990Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2024-11-21T23:11:20.167049Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2024-11-21T23:11:20.167092Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2024-11-21T23:11:20.167192Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:11:20.167236Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:11:20.178146Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2024-11-21T23:11:20.178326Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:11:20.178373Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:11:20.182926Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710659 2024-11-21T23:11:20.187514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:11:20.188525Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2024-11-21T23:11:20.188558Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2024-11-21T23:11:20.199831Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2024-11-21T23:11:20.199866Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2024-11-21T23:11:20.533982Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2024-11-21T23:11:20.595843Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2024-11-21T23:11:20.597560Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2024-11-21T23:11:20.597589Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2024-11-21T23:11:20.678453Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2024-11-21T23:11:20.691335Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2024-11-21T23:11:20.693232Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 43e5bb52-c6eaec1d-432af10b-2c12cc50, Bootstrap. 
Database: /dc-1 2024-11-21T23:11:20.693562Z node 1 :KQP_PROXY DEBUG: Request has 18445011843028.858075s seconds to be completed 2024-11-21T23:11:20.740817Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NzhhNWYwNmUtOTJlMGUxZDgtMmNmZmQ2ZWQtODc5Nzk4Y2I=, workerId: [1:7439874120128508941:2307], database: /dc-1, longSession: 1, local sessions count: 1 2024-11-21T23:11:20.740998Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T23:11:20.835167Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 43e5bb52-c6eaec1d-432af10b-2c12cc50, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2024-11-21T23:11:20.860558Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NzhhNWYwNmUtOTJlMGUxZDgtMmNmZmQ2ZWQtODc5Nzk4Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7439874120128508941:2307] 2024-11-21T23:11:20.860615Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7439874120128508948:2468] 2024-11-21T23:11:20.899529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874120128508949:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:20.903143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874120128508961:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:20.903309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:20.907751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2024-11-21T23:11:20.929384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [ ... : 5, targetId: [2:7439874155654772212:2320] 2024-11-21T23:11:28.090199Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7439874155654772214:2522] 2024-11-21T23:11:28.129291Z node 2 :KQP_PROXY DEBUG: TraceId: "01jd8fxbc0785vs58w9qh5jm6g", Request has 18445011843021.422360s seconds to be completed 2024-11-21T23:11:28.131315Z node 2 :KQP_PROXY DEBUG: TraceId: "01jd8fxbc0785vs58w9qh5jm6g", Created new session, sessionId: ydb://session/3?node_id=2&id=N2RlZmY1NDctOTY3Y2Y4YzQtYWRhMTgwYjMtMjYxZTVjNDQ=, workerId: [2:7439874155654772229:2330], database: /dc-1, longSession: 1, local sessions count: 2 2024-11-21T23:11:28.131446Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jd8fxbc0785vs58w9qh5jm6g 2024-11-21T23:11:28.135278Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2024-11-21T23:11:28.135297Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2024-11-21T23:11:28.135317Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2024-11-21T23:11:28.137177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2024-11-21T23:11:28.138261Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2024-11-21T23:11:28.138294Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2024-11-21T23:11:28.155132Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, Bootstrap. 
Database: /dc-1 2024-11-21T23:11:28.155519Z node 2 :KQP_PROXY DEBUG: Request has 18445011843021.396124s seconds to be completed 2024-11-21T23:11:28.157521Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NzY2MDdmZDgtNGFiZGE3ODAtMTI5Nzc4N2YtZWZkNDE0ZWU=, workerId: [2:7439874155654772294:2333], database: /dc-1, longSession: 1, local sessions count: 3 2024-11-21T23:11:28.157670Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T23:11:28.157981Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7439874151359804802:2453], selfId: [2:7439874138474902064:2060], source: [2:7439874155654772212:2320] 2024-11-21T23:11:28.158560Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2024-11-21T23:11:28.159692Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NzY2MDdmZDgtNGFiZGE3ODAtMTI5Nzc4N2YtZWZkNDE0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7439874155654772294:2333] 2024-11-21T23:11:28.159726Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7439874155654772297:2564] 2024-11-21T23:11:28.179174Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2024-11-21T23:11:28.278930Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2024-11-21T23:11:28.279766Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2024-11-21T23:11:28.279806Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2024-11-21T23:11:28.281529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:28.282866Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715666 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2024-11-21T23:11:28.282921Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715666 2024-11-21T23:11:28.320173Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 
2024-11-21T23:11:28.328014Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7439874155654772296:2335], selfId: [2:7439874138474902064:2060], source: [2:7439874155654772294:2333] 2024-11-21T23:11:28.328255Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzY2MDdmZDgtNGFiZGE3ODAtMTI5Nzc4N2YtZWZkNDE0ZWU=, TxId: 2024-11-21T23:11:28.328286Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzY2MDdmZDgtNGFiZGE3ODAtMTI5Nzc4N2YtZWZkNDE0ZWU=, TxId: 2024-11-21T23:11:28.328454Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 8166532f-b34c8981-a6ae5446-8ca6f474, start saving rows range [0; 1) 2024-11-21T23:11:28.328517Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, Bootstrap. Database: /dc-1 2024-11-21T23:11:28.328711Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NzY2MDdmZDgtNGFiZGE3ODAtMTI5Nzc4N2YtZWZkNDE0ZWU=, workerId: [2:7439874155654772294:2333], local sessions count: 2 2024-11-21T23:11:28.328816Z node 2 :KQP_PROXY DEBUG: Request has 18445011843021.222819s seconds to be completed 2024-11-21T23:11:28.330682Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NjczN2Y2NzItMTllNTJhMWYtYmVkN2Y5ZDEtNGVhZWEzNGQ=, workerId: [2:7439874155654772381:2346], database: /dc-1, longSession: 1, local sessions count: 3 2024-11-21T23:11:28.330792Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T23:11:28.331419Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2024-11-21T23:11:28.331985Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NjczN2Y2NzItMTllNTJhMWYtYmVkN2Y5ZDEtNGVhZWEzNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7439874155654772381:2346] 2024-11-21T23:11:28.332017Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7439874155654772383:2628] 2024-11-21T23:11:28.390625Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2024-11-21T23:11:28.447092Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=N2RlZmY1NDctOTY3Y2Y4YzQtYWRhMTgwYjMtMjYxZTVjNDQ=, workerId: [2:7439874155654772229:2330], local sessions count: 2 2024-11-21T23:11:28.523933Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7439874155654772382:2347], selfId: [2:7439874138474902064:2060], source: [2:7439874155654772381:2346] 2024-11-21T23:11:28.524684Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjczN2Y2NzItMTllNTJhMWYtYmVkN2Y5ZDEtNGVhZWEzNGQ=, TxId: 2024-11-21T23:11:28.524710Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjczN2Y2NzItMTllNTJhMWYtYmVkN2Y5ZDEtNGVhZWEzNGQ=, TxId: 2024-11-21T23:11:28.524841Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 8166532f-b34c8981-a6ae5446-8ca6f474, result part successfully saved 2024-11-21T23:11:28.524854Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 8166532f-b34c8981-a6ae5446-8ca6f474, reply SUCCESS, issues: 2024-11-21T23:11:28.532588Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, Bootstrap. Database: /dc-1 2024-11-21T23:11:28.532756Z node 2 :KQP_PROXY DEBUG: Request has 18445011843021.018880s seconds to be completed 2024-11-21T23:11:28.534595Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YTc5ZDEzODMtODgwMDUzMzEtMTY1ODMxOWQtZGI4OWIwZjk=, workerId: [2:7439874155654772415:2359], database: /dc-1, longSession: 1, local sessions count: 3 2024-11-21T23:11:28.534698Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2024-11-21T23:11:28.535132Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NjczN2Y2NzItMTllNTJhMWYtYmVkN2Y5ZDEtNGVhZWEzNGQ=, workerId: [2:7439874155654772381:2346], local sessions count: 2 2024-11-21T23:11:28.535232Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 8166532f-b34c8981-a6ae5446-8ca6f474, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2024-11-21T23:11:28.535534Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YTc5ZDEzODMtODgwMDUzMzEtMTY1ODMxOWQtZGI4OWIwZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 12, targetId: [2:7439874155654772415:2359] 2024-11-21T23:11:28.535564Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7439874155654772418:2646] >> TxUsage::WriteToTopic_Demo_27 [GOOD] >> KqpRm::NotEnoughMemory >> KqpRm::SingleSnapshotByExchanger >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> KqpRm::DisonnectNodes [GOOD] >> TTabletPipeTest::TestRewriteSameNode >> TxUsage::WriteToTopic_Demo_28 >> KqpRm::NotEnoughExecutionUnits [GOOD] >> TResourceBroker::TestErrors >> KqpRm::NotEnoughMemory [GOOD] >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2024-11-21T23:10:29.107651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873904937209586:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:29.107740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:29.358109Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c10/r3tmp/tmpAIGh7b/pdisk_1.dat 2024-11-21T23:10:29.688117Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:29.690102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:29.690195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:29.699974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28695, node 1 2024-11-21T23:10:29.861258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002c10/r3tmp/yandexm675ZR.tmp 2024-11-21T23:10:29.861283Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002c10/r3tmp/yandexm675ZR.tmp 2024-11-21T23:10:29.861434Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002c10/r3tmp/yandexm675ZR.tmp 2024-11-21T23:10:29.861566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:29.919787Z INFO: TTestServer started on Port 7353 GrpcPort 28695 TClient is connected to server localhost:7353 PQClient connected to localhost:28695 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:30.355735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:10:30.381259Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:30.386868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:10:32.741806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873917822112267:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:32.741922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:32.743289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873917822112280:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:32.747700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:32.775053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873917822112282:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:33.193005Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873922117079653:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:33.196366Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODNkZjE4OGQtMWY0ZjYzN2QtMjVhZmQ2NDMtOTRiMWI2ZTk=, ActorId: [1:7439873917822112265:2307], ActorState: ExecuteState, TraceId: 01jd8fvn8k5bqcq1jyvzw221mk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:33.198814Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:33.202150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:33.245875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:33.401440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873922117079931:2627] 2024-11-21T23:10:34.110532Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873904937209586:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:34.110606Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:39.676639Z :ValidateSettingsFailOnStart INFO: TTopicSdkTestSetup started 2024-11-21T23:10:39.693987Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:39.742668Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:39.743702Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:39.743962Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:39.744162Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:39.744193Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:39.744214Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:39.744237Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:39.744274Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:39.744308Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:39.744323Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:39.760238Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:39.760298Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873947886884013:2799], now have 1 active actors on pipe 2024-11-21T23:10:39.760344Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T23:10:39.762799Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873947886884014:2800] connected; active server actors: 1 2024-11-21T23:10:39.763049Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T23:10:39.763938Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:39.764085Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:39.765053Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:39.765072Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:39.783248Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439873904937210019:2199] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active Creat ... 
) wait data in partition inited, cookie 1 from offset4 2024-11-21T23:11:27.649498Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid e2b086c-adfbc395-1bd64d58-d32c3efe has messages 1 2024-11-21T23:11:27.649589Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 read done: guid# e2b086c-adfbc395-1bd64d58-d32c3efe, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 201 2024-11-21T23:11:27.649618Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 response to read: guid# e2b086c-adfbc395-1bd64d58-d32c3efe 2024-11-21T23:11:27.649865Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 Process answer. Aval parts: 0 2024-11-21T23:11:27.650908Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] Got ReadResponse, serverBytesSize = 201, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428599 2024-11-21T23:11:27.651009Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428599 2024-11-21T23:11:27.651358Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2024-11-21T23:11:27.651416Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] Returning serverBytesSize = 201 to budget 2024-11-21T23:11:27.651447Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] In ContinueReadingDataImpl, ReadSizeBudget = 201, ReadSizeServerDelta = 52428599 2024-11-21T23:11:27.651704Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T23:11:27.651987Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (3-3) 2024-11-21T23:11:27.652068Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] The application data is transferred to the client. Number of messages 1, size 9 bytes 2024-11-21T23:11:27.652115Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] Returning serverBytesSize = 0 to budget 0 1 2024-11-21T23:11:27.652208Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] Commit offsets [3, 4). 
Partition stream id: 1 2024-11-21T23:11:27.653182Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 grpc read done: success# 1, data# { read_request { bytes_size: 201 } } 2024-11-21T23:11:27.653298Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 got read request: guid# a12523ec-4409ea15-f02c50d6-82161768 2024-11-21T23:11:27.653470Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 3 end: 4 } } } } 2024-11-21T23:11:27.653629Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) committing to position 4 prev 3 end 4 by cookie 2 2024-11-21T23:11:27.656554Z node 5 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T23:11:27.656591Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T23:11:27.656708Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 offset is set to 4 (startOffset 0) session consumer-1_5_2_1924281003139798686_v1 2024-11-21T23:11:27.656846Z node 5 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:11:27.657997Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user consumer-1 readTimeStamp for offset 4 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 2 2024-11-21T23:11:27.658048Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:11:27.658092Z node 5 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T23:11:27.658181Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2024-11-21T23:11:27.658212Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) commit done to position 4 endOffset 4 with cookie 2 2024-11-21T23:11:27.658874Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 4 } } 2024-11-21T23:11:27.658253Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 4 2024-11-21T23:11:28.198135Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:11:28.198175Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:28.510508Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:11:28.515545Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0 describe result for acl check 2024-11-21T23:11:28.609622Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 
-:test-topic:0:1:3:4 2024-11-21T23:11:28.609683Z :INFO: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:11:28.621331Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 checking auth because of timeout 2024-11-21T23:11:28.621405Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 auth for : consumer-1 2024-11-21T23:11:28.621926Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 Handle describe topics response 2024-11-21T23:11:28.622004Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 auth is DEAD 2024-11-21T23:11:28.622067Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 auth ok: topics# 1, initDone# 1 2024-11-21T23:11:29.614827Z :INFO: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] Closing read session. Close timeout: 0.000000s 2024-11-21T23:11:29.614892Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2024-11-21T23:11:29.614953Z :INFO: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2005 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:11:29.615065Z :NOTICE: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:11:29.615112Z :DEBUG: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] [] Abort session to cluster 2024-11-21T23:11:29.615726Z :NOTICE: [/Root] [/Root] [67d23d25-65edfc94-16b8d5d0-6904ac3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:11:29.623193Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 grpc read done: success# 0, data# { } 2024-11-21T23:11:29.623227Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 grpc read failed 2024-11-21T23:11:29.623264Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 grpc closed 2024-11-21T23:11:29.623302Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer-1 session consumer-1_5_2_1924281003139798686_v1 is DEAD 2024-11-21T23:11:29.626121Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7439874153432375667:2514] disconnected; active server actors: 1 2024-11-21T23:11:29.626149Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7439874153432375667:2514] client consumer-1 disconnected session consumer-1_5_2_1924281003139798686_v1 2024-11-21T23:11:29.626251Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:11:29.626273Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session consumer-1_5_2_1924281003139798686_v1 2024-11-21T23:11:29.626313Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439874153432375670:2517] destroyed 2024-11-21T23:11:29.626367Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer-1_5_2_1924281003139798686_v1 2024-11-21T23:11:29.642617Z :INFO: [/Root] SessionId [test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T23:11:29.642659Z :INFO: [/Root] SessionId [test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T23:11:29.642706Z :DEBUG: [/Root] SessionId [test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T23:11:29.646669Z :INFO: [/Root] SessionId [test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T23:11:29.646718Z :DEBUG: [/Root] SessionId [test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T23:11:29.647580Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0 grpc read done: success: 0 data: 2024-11-21T23:11:29.647614Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0 grpc read failed 2024-11-21T23:11:29.647643Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0 grpc closed 2024-11-21T23:11:29.647658Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|7f5e1d48-2371b9c7-ad9062f7-4d65585_0 is DEAD 2024-11-21T23:11:29.648413Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:11:29.648770Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:11:29.648801Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439874144842440875:2469] 
destroyed 2024-11-21T23:11:29.648847Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2024-11-21T23:11:30.119519Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:30.120080Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001dcc/r3tmp/tmpcAkDi5/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:30.120692Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001dcc/r3tmp/tmpcAkDi5/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001dcc/r3tmp/tmpcAkDi5/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4411065949536656573 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:30.193616Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:30.193938Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:30.216363Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:30.216521Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:30.216705Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:30.216791Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:30.216947Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:30.216985Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T23:11:30.217017Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:30.217036Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:30.217247Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:30.248404Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230690 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:30.248683Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:30.248770Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230690 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:30.249167Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:30.249314Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:30.249440Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:30.249478Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:30.249583Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230690 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:30.249782Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:30.249818Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:30.249887Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230690 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:30.250574Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:30.250692Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:30.251198Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:30.251467Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:30.251796Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:30.251979Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 
2024-11-21T23:11:30.252155Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:30.252315Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:31.347631Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2024-11-21T23:11:31.347731Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2024-11-21T23:11:31.348049Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2024-11-21T23:11:31.348402Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2024-11-21T23:11:31.348786Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2024-11-21T23:11:31.348933Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:55} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:54:2071] ServerId# [1:346:2266] TabletId# 72057594037932033 PipeClientId# [2:54:2071] 2024-11-21T23:11:31.349101Z node 2 :TX_PROXY WARN: actor# [2:141:2085] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2024-11-21T23:11:31.349219Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:31.349364Z node 2 :KQP_RESOURCE_MANAGER INFO: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:31.349448Z node 2 :KQP_RESOURCE_MANAGER INFO: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:453:2101], reason: tenant updated 2024-11-21T23:11:31.349686Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:31.352166Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:31.352323Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:31.695314Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2024-11-21T23:11:31.816849Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:31.817370Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001d33/r3tmp/tmp8ytInq/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:31.817982Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001d33/r3tmp/tmp8ytInq/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001d33/r3tmp/tmp8ytInq/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17887563608948988567 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:31.865004Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:31.865323Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:31.881603Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:31.881753Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:31.882114Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:31.882214Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:31.882423Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:31.882476Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:31.882512Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:31.882534Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T23:11:31.882716Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:31.893239Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230691 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:31.893478Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:31.893571Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230691 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:31.893968Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:31.894145Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:31.894276Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:31.894312Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:31.894541Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230691 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:31.894726Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:31.894763Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:31.894856Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230691 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:31.895465Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:31.895612Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:31.896105Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:31.896437Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:31.896799Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:31.896993Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:31.897200Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:31.897466Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2024-11-21T23:11:32.037393Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:32.037797Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001d21/r3tmp/tmpDGwlq5/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:32.038453Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001d21/r3tmp/tmpDGwlq5/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001d21/r3tmp/tmpDGwlq5/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11642014488692691138 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:32.075924Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:32.076190Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:32.095687Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:32.095805Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:32.095989Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:32.096061Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:32.096201Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:32.096235Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:32.096267Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:32.096288Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T23:11:32.096450Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.108293Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230692 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:32.108494Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.108565Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230692 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:32.108885Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:32.109009Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:32.109108Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:32.109133Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.109304Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230692 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:32.109433Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:32.109470Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.109527Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230692 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:32.110247Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:32.110340Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:32.110877Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:32.111155Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:32.111421Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:32.111577Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:32.111802Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:32.111973Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 |82.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD]
|82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD]
|82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD]
>> KqpRm::SingleSnapshotByExchanger [GOOD]
|82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD]
Test command err: 2024-11-21T23:10:27.962196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:27.969122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:27.969315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019c3/r3tmp/tmpPjXMWE/pdisk_1.dat 2024-11-21T23:10:28.384154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:28.439969Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:28.491656Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:10:28.492854Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:10:28.493102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:28.493254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:28.504896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:28.621593Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:10:28.621678Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:10:28.621849Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T23:10:28.718415Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:10:28.719102Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:10:28.719213Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:10:28.719549Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:10:28.719697Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:10:28.719782Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T23:10:28.719998Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:10:28.721475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:28.722605Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:10:28.722705Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:10:28.757117Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:10:28.758094Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:10:28.758539Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:10:28.758828Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:10:28.817116Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:10:28.817849Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:10:28.817967Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:10:28.825219Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:10:28.825343Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:10:28.825411Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:10:28.825790Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:10:28.869257Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:10:28.869415Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:10:28.869509Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:10:28.869542Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:28.869572Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:10:28.869607Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:28.869953Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:28.870059Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:28.870563Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:10:28.870672Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:10:28.870834Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:28.870867Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:28.870910Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:10:28.870956Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:28.870999Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:28.871051Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:10:28.871092Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:10:28.871123Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:10:28.871159Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:10:28.871207Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:28.871315Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:10:28.871360Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:10:28.871448Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:28.871653Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:10:28.871696Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:10:28.871775Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:10:28.871830Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:10:28.871875Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:10:28.871905Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:10:28.871939Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:28.872209Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:10:28.872250Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:10:28.872279Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:10:28.872313Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:28.872359Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:10:28.872400Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:10:28.872430Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:10:28.872459Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:10:28.872529Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:10:28.873158Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:28.873199Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:28.873537Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:28.873580Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:10:28.873640Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:10:28.875793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:10:28.875837Z ... on unit StoreAndSendOutRS 2024-11-21T23:11:32.224168Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit StoreAndSendOutRS 2024-11-21T23:11:32.224208Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T23:11:32.224235Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit StoreAndSendOutRS 2024-11-21T23:11:32.224261Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit PrepareKqpDataTxInRS 2024-11-21T23:11:32.224291Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit PrepareKqpDataTxInRS 2024-11-21T23:11:32.224321Z node 7 :TX_DATASHARD TRACE: Prepare InReadsets from 72075186224037889 to 72075186224037888 2024-11-21T23:11:32.224349Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T23:11:32.224374Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit PrepareKqpDataTxInRS 2024-11-21T23:11:32.224396Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T23:11:32.224421Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T23:11:32.224450Z node 7 :TX_DATASHARD TRACE: Prepare for loading readset for [900:281474976715668] at 72075186224037888 source=72075186224037889 target=72075186224037888 2024-11-21T23:11:32.224485Z node 7 :TX_DATASHARD TRACE: Expected 1 readsets for [900:281474976715668] at 72075186224037888 2024-11-21T23:11:32.224587Z node 7 :TX_DATASHARD TRACE: Filled readset for [900:281474976715668] from=72075186224037889 to=72075186224037888origin=72075186224037889 2024-11-21T23:11:32.224626Z node 7 :TX_DATASHARD TRACE: Remain 0 read sets for [900:281474976715668] at 72075186224037888 2024-11-21T23:11:32.224663Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T23:11:32.224689Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit LoadAndWaitInRS 
2024-11-21T23:11:32.224714Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T23:11:32.224739Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T23:11:32.224783Z node 7 :TX_DATASHARD TRACE: Operation [900:281474976715668] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191852 2024-11-21T23:11:32.224962Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T23:11:32.225003Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:11:32.225030Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T23:11:32.225077Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit CompleteOperation 2024-11-21T23:11:32.225106Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:11:32.225278Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is DelayComplete 2024-11-21T23:11:32.225311Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit CompleteOperation 2024-11-21T23:11:32.225336Z node 7 :TX_DATASHARD TRACE: Add [900:281474976715668] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:11:32.225376Z node 7 :TX_DATASHARD TRACE: Trying to execute [900:281474976715668] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:11:32.225411Z node 7 :TX_DATASHARD TRACE: Execution status for [900:281474976715668] at 72075186224037888 is Executed 2024-11-21T23:11:32.225435Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [900:281474976715668] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:11:32.225462Z node 7 :TX_DATASHARD TRACE: Execution plan for [900:281474976715668] at 72075186224037888 has finished 2024-11-21T23:11:32.225489Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:11:32.225518Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:11:32.225545Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:11:32.225572Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:11:32.226149Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:11:32.226226Z node 7 :TX_DATASHARD TRACE: Complete execution for [700:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:11:32.226352Z node 7 :TX_DATASHARD DEBUG: Complete [700 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [7:960:2751], exec latency: 652 ms, propose latency: 652 ms 2024-11-21T23:11:32.226500Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T23:11:32.226572Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037889 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 
72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T23:11:32.226609Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:11:32.227224Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:738:2604], Recipient [7:1074:2852]: {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2024-11-21T23:11:32.227276Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:11:32.227349Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 2024-11-21T23:11:32.227583Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:32.227620Z node 7 :TX_DATASHARD TRACE: Complete execution for [700:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:11:32.227673Z node 7 :TX_DATASHARD DEBUG: Complete [700 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [7:960:2751], exec latency: 202 ms, propose latency: 202 ms 2024-11-21T23:11:32.227731Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T23:11:32.227766Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:32.228068Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2604]: {TEvReadSet step# 700 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2024-11-21T23:11:32.228105Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:11:32.228136Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2024-11-21T23:11:32.229924Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:32.229969Z node 7 :TX_DATASHARD TRACE: Complete execution for [800:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:11:32.230014Z node 7 :TX_DATASHARD DEBUG: Complete [800 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [7:999:2786], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:11:32.230072Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 800 txid# 281474976715666 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2024-11-21T23:11:32.230103Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:32.230607Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2604]: {TEvReadSet step# 800 txid# 281474976715666 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2024-11-21T23:11:32.230647Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2024-11-21T23:11:32.230679Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715666 2024-11-21T23:11:32.230856Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 7 Status: STATUS_NOT_FOUND { items { int32_value: 1 } items { int32_value: 10 } } { items { int32_value: 2 } items { int32_value: 20 } } 2024-11-21T23:11:32.232931Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:32.233002Z node 7 :TX_DATASHARD TRACE: Complete execution for [900:281474976715668] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:11:32.233092Z node 7 :TX_DATASHARD DEBUG: Complete [900 : 281474976715668] from 72075186224037888 at tablet 72075186224037888 send result to client [7:1033:2813], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:11:32.233190Z node 7 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 900 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 3} 2024-11-21T23:11:32.233254Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:32.233859Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:1074:2852], Recipient [7:738:2604]: {TEvReadSet step# 900 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 3} 2024-11-21T23:11:32.233937Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2024-11-21T23:11:32.234012Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715668 { items { int32_value: 2 } items { int32_value: 20 } } 2024-11-21T23:11:32.235252Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 7 Status: STATUS_NOT_FOUND 2024-11-21T23:11:32.236908Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:54:2101], Recipient [7:738:2604]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715667 LockNode: 7 Status: STATUS_NOT_FOUND { items { int32_value: 2 } items { int32_value: 20 } } |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2024-11-21T23:11:32.438247Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:32.438963Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001d18/r3tmp/tmpO3dRNj/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2024-11-21T23:11:32.439680Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001d18/r3tmp/tmpO3dRNj/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001d18/r3tmp/tmpO3dRNj/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11391575217376665553 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:32.488748Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:32.489012Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:32.508582Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:32.508718Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:32.508935Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:32.509069Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:32.509279Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:32.509318Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:32.509350Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:32.509379Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T23:11:32.509547Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.525626Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230692 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:32.525859Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.525952Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230692 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:32.526348Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:32.526678Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:32.526802Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:32.526837Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.526932Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230692 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:32.527121Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:32.527154Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.527214Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230692 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:32.527903Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:32.528058Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:32.528474Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:32.528860Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:32.529224Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:32.529391Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:32.529600Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:32.529806Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:32.533577Z node 1 :RESOURCE_BROKER DEBUG: Submitted 
new kqp_query task kqp-1-2-1 (1 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:32.533651Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:32.533721Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:32.533765Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:32.533823Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:448:2330])) 2024-11-21T23:11:32.533978Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:32.534063Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:32.534100Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:32.534142Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:32.534185Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:32.534220Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:448:2330])) 2024-11-21T23:11:32.534302Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:32.534733Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:32.534969Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230692 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2024-11-21T23:11:32.535213Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:33.579975Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2024-11-21T23:11:33.580149Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:448:2330]) (release resources {0, 100}) 2024-11-21T23:11:33.580218Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300200 (remove task kqp-1-2-1 (1 by [1:448:2330])) 2024-11-21T23:11:33.580261Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100400 2024-11-21T23:11:33.580323Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2024-11-21T23:11:33.580378Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:448:2330]) (release resources {0, 100}) 2024-11-21T23:11:33.580421Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300200 to 0.100400 (remove task kqp-2-1-2 (2 by [1:448:2330])) 2024-11-21T23:11:33.580471Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2024-11-21T23:11:33.580719Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:33.580911Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230693 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:33.584022Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:33.868835Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> UpsertLoad::ShouldWriteKqpUpsert2 >> UpsertLoad::ShouldWriteDataBulkUpsert >> TxUsage::WriteToTopic_Demo_7 [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> ReadLoad::ShouldReadIterate >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo [GOOD] >> UpsertLoad::ShouldCreateTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> ReadLoad::ShouldReadKqp >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> TVPatchTests::FindingPartsWhenError [GOOD] >> TxUsage::WriteToTopic_Demo_37 [GOOD] >> TxUsage::WriteToTopic_Demo_8 >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2024-11-21T23:11:19.909915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:19.931084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:19.931346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0027ba/r3tmp/tmpu9Dwhg/pdisk_1.dat 2024-11-21T23:11:21.843092Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.143860s 2024-11-21T23:11:21.843269Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.144084s 2024-11-21T23:11:21.993984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:22.206934Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:22.284586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:22.284756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:22.300421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:22.503320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:22.617802Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:11:22.618091Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:11:22.697178Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:11:22.697334Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:11:22.703204Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:11:22.703342Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:11:22.703416Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:11:22.710081Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:11:22.784983Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:11:22.785252Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:11:22.785423Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:11:22.790524Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:11:22.790653Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:11:22.790708Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:22.795323Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:11:22.795482Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:11:22.795612Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:11:22.795718Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:22.795782Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:11:22.795833Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:11:22.795896Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:22.802577Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:22.810636Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:11:22.810837Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:11:22.816505Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:11:22.831171Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:22.831312Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:11:23.034418Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:11:23.038224Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:11:23.038309Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:23.038803Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:23.038865Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:11:23.039331Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:11:23.039611Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:11:23.040404Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:11:23.041223Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:23.041283Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:11:23.044310Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:11:23.044792Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 
planned 0 2024-11-21T23:11:23.047102Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:11:23.047170Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:23.047873Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:11:23.047958Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:11:23.048042Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:23.049322Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:23.049366Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:11:23.049432Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:11:23.049508Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:11:23.049572Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:11:23.049670Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:23.052608Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:11:23.054723Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:11:23.054931Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:11:23.054999Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:11:23.079020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:700:2584], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:23.079155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:710:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:23.079247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:23.090598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:11:23.097258Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:11:23.303694Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:11:23.306364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:714:2592], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:25.179591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fx6e154414ag5psapnpbv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzBhYmU0ZjAtODNhYTE3YzktNDlmYTIwNTgtNjFiMTVmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:11:25.188558Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:816:2654], serverId# [1:817:2655], sessionId# [0:0:0] 2024-11-21T23:11:25.189139Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:25.206162Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:25.206303Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:25.212538Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tab ... _DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:11:35.213980Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:627:2533], serverId# [3:638:2540], sessionId# [0:0:0] 2024-11-21T23:11:35.214097Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:35.214147Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:11:35.214194Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:11:35.214244Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:35.214355Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:35.214615Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:11:35.214715Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:11:35.216757Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:11:35.231092Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:35.231246Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:11:35.422715Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:665:2557], serverId# [3:667:2559], sessionId# [0:0:0] 2024-11-21T23:11:35.423351Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 526 RawX2: 12884904347 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:11:35.423411Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:35.424866Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:35.424927Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:11:35.424979Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 
72075186224037888 2024-11-21T23:11:35.425265Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:11:35.425417Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:11:35.425718Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:35.425781Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:11:35.431584Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:11:35.432220Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:11:35.434237Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:11:35.434286Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:35.435984Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:11:35.436059Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:11:35.436148Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:35.437163Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:35.437218Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:11:35.437295Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:11:35.437380Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:11:35.437434Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:11:35.437532Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:35.447129Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:11:35.449611Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:11:35.450331Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:11:35.451579Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:11:35.462706Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:699:2583], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:35.462815Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:708:2588], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:35.463185Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:35.468758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:11:35.474370Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:11:35.688861Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:11:35.692020Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:713:2591], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:35.870901Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8fxjh473rpqgfhrejbt9w4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTdjMjY1YjYtZGFmNzcwMWItOWUxYmQzNjQtYTAwMzQxOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:11:35.871489Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:814:2652], serverId# [3:815:2653], sessionId# [0:0:0] 2024-11-21T23:11:35.871732Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:35.884343Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:35.884505Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:35.940094Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T23:11:35.941221Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2024-11-21T23:11:35.955518Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2024-11-21T23:11:35.955612Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:35.955895Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T23:11:35.955941Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2024-11-21T23:11:35.956182Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:35.956234Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:11:35.956290Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:11:35.956357Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:35.956508Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:822:2659], serverId# [3:823:2660], sessionId# [0:0:0] 2024-11-21T23:11:35.957605Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:35.958002Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:35.958208Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:35.958261Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:11:35.958324Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2024-11-21T23:11:35.959990Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:11:35.960081Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:35.960853Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2024-11-21T23:11:35.961146Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2024-11-21T23:11:35.961283Z 
node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2024-11-21T23:11:35.961334Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2024-11-21T23:11:35.962871Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2024-11-21T23:11:35.962920Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2024-11-21T23:11:35.963364Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:11:35.963404Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:11:35.963460Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2024-11-21T23:11:35.963591Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:11:35.963650Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:11:35.963689Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2024-11-21T23:11:37.342725Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:37.343686Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2024-11-21T23:11:37.343788Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2024-11-21T23:11:37.343915Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TxUsage::WriteToTopic_Demo_38 >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TVPatchTests::PatchPartGetError >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> TVPatchTests::PatchPartGetError [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> 
TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2024-11-21T23:11:39.960082Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:39.961516Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2024-11-21T23:11:39.961596Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2024-11-21T23:11:39.961866Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2024-11-21T23:11:39.961970Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T23:11:39.962161Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2024-11-21T23:11:40.103389Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:40.104366Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2024-11-21T23:11:40.104423Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T23:11:40.104637Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2024-11-21T23:11:40.104708Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:40.105045Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# 
[1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2024-11-21T23:11:40.105110Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> LocalPartition::WithoutPartition [GOOD] >> LocalPartition::WithoutPartitionWithRestart >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2024-11-21T23:11:39.283447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:39.286472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:39.286619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00225c/r3tmp/tmp0HPQ96/pdisk_1.dat 2024-11-21T23:11:39.706739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.751837Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:39.809488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:39.809618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:39.822866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:39.968359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:40.418964Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2024-11-21T23:11:40.419143Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T23:11:40.530332Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.110692s, errors=0 2024-11-21T23:11:40.530469Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2024-11-21T23:11:38.657163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:38.662709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:38.662921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002284/r3tmp/tmpRCWpCV/pdisk_1.dat 2024-11-21T23:11:39.118555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.171770Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:39.222196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:39.222347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:39.235616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:39.355837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.736928Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2024-11-21T23:11:39.737062Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2024-11-21T23:11:39.740743Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} started# 5 actors each with inflight# 4 2024-11-21T23:11:39.740843Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T23:11:39.740916Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T23:11:39.740947Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T23:11:39.740975Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T23:11:39.741006Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2024-11-21T23:11:39.745787Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} session: ydb://session/3?node_id=1&id=NjY2NmMwNGQtNzc4Y2E3YjgtZTJmYWVhMjQtOGY1NTE2NzI= 2024-11-21T23:11:39.745889Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} session: ydb://session/3?node_id=1&id=NWUxZGZhZTEtYjExOThmNGItYmI2NWM2ZGEtNGRjMmZiYzM= 2024-11-21T23:11:39.747589Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} session: ydb://session/3?node_id=1&id=ZjNiMmQxN2UtOTM1ODAyZmItNmE0NWM3MS0xMWIyMjhhYQ== 2024-11-21T23:11:39.749165Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} session: ydb://session/3?node_id=1&id=NmI0MzQzNTEtZTU5N2RkMDAtMTY3YjU2ZWYtOGY4NTI1NmY= 2024-11-21T23:11:39.750851Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} session: ydb://session/3?node_id=1&id=MTZlYjJiZjQtZGY3YmIxNTYtNjFmM2E5MjQtNTYwOTk0NGU= 2024-11-21T23:11:39.755414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:715:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.755551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.755626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.755669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.755717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.755800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.755890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.763664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:11:40.107921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.108025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.108086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.108150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.108212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:757:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.580072Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} finished in 1732230700.580026s, errors=0 2024-11-21T23:11:40.580282Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1732230700580 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.657202Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} finished in 1732230700.657168s, errors=0 2024-11-21T23:11:40.657462Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1732230700657 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.736873Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} finished in 1732230700.736840s, errors=0 2024-11-21T23:11:40.737061Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1732230700736 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.813220Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 1732230700.813178s, errors=0 2024-11-21T23:11:40.813492Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1732230700813 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.896938Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} finished in 1732230700.896889s, errors=0 2024-11-21T23:11:40.897189Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1732230700896 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.897248Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} finished in 1.156840s, oks# 20, errors# 0 2024-11-21T23:11:40.897385Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2024-11-21T23:11:38.775241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:38.783746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:38.783968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00227d/r3tmp/tmpBWfOhW/pdisk_1.dat 2024-11-21T23:11:39.179650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.225045Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:39.275767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:39.275953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:39.288931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:39.403745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.784139Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2024-11-21T23:11:39.784274Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2024-11-21T23:11:39.788149Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} started# 5 actors each with inflight# 4 2024-11-21T23:11:39.788249Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:39.788308Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:39.788332Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:39.788362Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:39.788403Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:39.793111Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} session: ydb://session/3?node_id=1&id=Y2Y3ODU5ODctODliYjc4ZTktOTI5NzAyMjUtNmM5MWNhNDg= 2024-11-21T23:11:39.793182Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} session: ydb://session/3?node_id=1&id=MmUzZGY5ZWQtNzI5MDg1ZDItOThlNmFkNzItNzA3MzdmYTg= 2024-11-21T23:11:39.794842Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} session: 
ydb://session/3?node_id=1&id=N2I4MDU4MS0yOTY1YjZiYS04ZTVhM2IxYy1mMjBhODRhMw== 2024-11-21T23:11:39.796326Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} session: ydb://session/3?node_id=1&id=OTAyNmRjMmYtZGFmODRiOTgtYTIzMDkyM2UtOGI3ZTRiZDQ= 2024-11-21T23:11:39.797816Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} session: ydb://session/3?node_id=1&id=NGRmNmJlZDItMjMwMDk0MC1iYjljZmMyZS0xY2I4YTRlMw== 2024-11-21T23:11:39.802327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:715:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.802490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.802692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.802737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.802790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.802857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.802950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:39.813496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:11:40.065406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.065494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.065533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.065580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.065639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:757:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:40.536037Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} finished in 1732230700.535981s, errors=0 2024-11-21T23:11:40.536318Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1732230700535 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.613833Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 1732230700.613796s, errors=0 2024-11-21T23:11:40.614084Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1732230700613 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.686987Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} finished in 1732230700.686937s, errors=0 2024-11-21T23:11:40.687247Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1732230700686 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.764357Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} finished in 1732230700.764315s, errors=0 2024-11-21T23:11:40.766118Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1732230700764 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.851436Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} finished in 1732230700.851384s, errors=0 2024-11-21T23:11:40.851689Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1732230700851 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:40.851745Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} finished in 1.063929s, oks# 20, errors# 0 2024-11-21T23:11:40.851861Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogCountByResource >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2024-11-21T23:09:36.467975Z :TestReorderedExecutor INFO: Random seed for debugging is 1732230576467930 2024-11-21T23:09:37.343627Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873678351937960:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:37.343671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:37.442670Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873679138945502:2058];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:37.442713Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:37.807483Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:09:37.818825Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029d1/r3tmp/tmpCdwcch/pdisk_1.dat 2024-11-21T23:09:38.372719Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:38.393415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:38.398089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:38.398238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:38.399509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:38.399569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:38.416778Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:09:38.416909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:38.418165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11077, node 1 2024-11-21T23:09:38.781185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0029d1/r3tmp/yandex0lQ5qP.tmp 2024-11-21T23:09:38.781217Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0029d1/r3tmp/yandex0lQ5qP.tmp 2024-11-21T23:09:38.781361Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0029d1/r3tmp/yandex0lQ5qP.tmp 2024-11-21T23:09:38.781448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:38.952906Z INFO: TTestServer started on Port 21002 GrpcPort 11077 TClient is connected to server localhost:21002 PQClient connected to localhost:11077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:09:39.674207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:09:42.346659Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873678351937960:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:42.346728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:42.430513Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873679138945502:2058];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:42.430604Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:42.976489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873700613782284:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:42.980144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873700613782277:2282], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:42.980565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:42.993191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:09:43.045894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873700613782301:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:09:43.490138Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873704121742599:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:43.492212Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGZkZTdmZjgtMjY3NmRiNTUtOGM1NDJjZGItZTJiZWVhMmI=, ActorId: [1:7439873704121742566:2304], ActorState: ExecuteState, TraceId: 01jd8ft4sz1hywg4bvyh2gav8z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:43.495028Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:43.499965Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873704908749640:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:43.500228Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODlkYzI3ZWYtMzg1ZjAwYjMtMmY4OTVmYmQtMzA0MjgxNzU=, ActorId: [2:7439873700613782260:2280], ActorState: ExecuteState, TraceId: 01jd8ft4mq8mqvwf22wpyny37h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:43.500860Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:43.503048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:09:43.673085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:09:43.936602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:11077", true, true, 1000); 2024-11-21T23:09:44.613112Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8ft5s2722a3cme34pmw6kf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY5NTMwN2YtNDRlNWJlM2QtMmNiOTdmNWUtYzAyMGMyMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439873708416710401:3043] === CheckClustersList. Ok 2024-11-21T23:09:51.098318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:11077 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:09:51.295700Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:11077 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ... 
opic: "test-topic" message_group_id: "src" from ipv6:[::1]:37192 2024-11-21T23:11:38.417632Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37192 proto=v1 topic=test-topic durationSec=0 2024-11-21T23:11:38.417640Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:11:38.419328Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T23:11:38.419461Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T23:11:38.419480Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T23:11:38.419494Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T23:11:38.419514Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874200260726962:2467] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T23:11:38.422576Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874200260726962:2467] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T23:11:38.664748Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874200260726962:2467] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T23:11:38.665452Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439874200260727006:2467] connected; active server actors: 1 2024-11-21T23:11:38.665620Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874200260726962:2467] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T23:11:38.665637Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874200260726962:2467] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T23:11:38.666692Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439874200260727006:2467] disconnected; active server actors: 1 2024-11-21T23:11:38.666709Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7439874200260727006:2467] disconnected no session 2024-11-21T23:11:38.843830Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874200260726962:2467] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T23:11:38.843900Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874200260726962:2467] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T23:11:38.843924Z node 13 :PQ_PARTITION_CHOOSER DEBUG: 
TPartitionChooser [13:7439874200260726962:2467] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:11:38.843959Z node 13 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:11:38.845636Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 14, Generation: 1 2024-11-21T23:11:38.847243Z node 13 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0 2024-11-21T23:11:38.845334Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:11:38.845390Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [13:7439874200260727045:2467], now have 1 active actors on pipe 2024-11-21T23:11:38.845909Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:11:38.845941Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:11:38.846033Z node 14 :PERSQUEUE INFO: new Cookie src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:11:38.846138Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:11:38.846198Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:11:38.846807Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:11:38.846830Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:11:38.846922Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:11:38.849050Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732230698849 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:11:38.849206Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:11:38.850098Z :INFO: [] MessageGroupId [src] SessionId [src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0] Write session: close. 
Timeout = 0 ms 2024-11-21T23:11:38.850141Z :INFO: [] MessageGroupId [src] SessionId [src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0] Write session will now close 2024-11-21T23:11:38.850199Z :DEBUG: [] MessageGroupId [src] SessionId [src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0] Write session: aborting 2024-11-21T23:11:38.850803Z :INFO: [] MessageGroupId [src] SessionId [src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:11:38.850848Z :DEBUG: [] MessageGroupId [src] SessionId [src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0] Write session: destroy 2024-11-21T23:11:38.880797Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0 grpc read done: success: 0 data: 2024-11-21T23:11:38.880829Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0 grpc read failed 2024-11-21T23:11:38.880873Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0 grpc closed 2024-11-21T23:11:38.880894Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b33d5a6d-aa75e9d3-25e06cb0-88158528_0 is DEAD 2024-11-21T23:11:38.881486Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:11:38.883489Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:11:38.883546Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7439874200260727045:2467] destroyed 2024-11-21T23:11:38.883598Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:11:38.960524Z :INFO: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Starting read session 2024-11-21T23:11:38.960576Z :DEBUG: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Starting cluster discovery 2024-11-21T23:11:38.960795Z :INFO: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23262: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23262
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23262. " 2024-11-21T23:11:38.960847Z :DEBUG: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Restart cluster discovery in 0.006248s 2024-11-21T23:11:38.970816Z :DEBUG: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Starting cluster discovery 2024-11-21T23:11:38.971168Z :INFO: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23262: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23262
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23262. " 2024-11-21T23:11:38.971217Z :DEBUG: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Restart cluster discovery in 0.015626s 2024-11-21T23:11:38.990554Z :DEBUG: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Starting cluster discovery 2024-11-21T23:11:38.990771Z :INFO: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23262: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23262
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23262. " 2024-11-21T23:11:38.990803Z :DEBUG: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Restart cluster discovery in 0.020931s 2024-11-21T23:11:39.017132Z :DEBUG: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Starting cluster discovery 2024-11-21T23:11:39.017622Z :NOTICE: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23262: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23262
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23262. " } 2024-11-21T23:11:39.017842Z :NOTICE: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23262: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23262
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23262. " } 2024-11-21T23:11:39.018003Z :INFO: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Closing read session. Close timeout: 0.000000s 2024-11-21T23:11:39.018110Z :NOTICE: [/Root] [/Root] [5750ce55-c72411fa-e91f15d1-4d1daf7f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2024-11-21T23:11:42.665160Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:42.666241Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2024-11-21T23:11:42.666314Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2024-11-21T23:11:42.666687Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T23:11:42.666887Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2024-11-21T23:11:42.667000Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2024-11-21T23:11:42.444912Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:42.445650Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2024-11-21T23:11:42.445708Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2024-11-21T23:11:42.445804Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2024-11-21T23:11:42.737245Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:42.737527Z node 2 :BS_VDISK_PATCH INFO: 
{BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2024-11-21T23:11:42.737595Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T23:11:42.737812Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2024-11-21T23:11:42.737889Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2024-11-21T23:11:42.737965Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2024-11-21T23:11:40.462537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:40.465846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:40.466003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002233/r3tmp/tmpAHNQ4z/pdisk_1.dat 2024-11-21T23:11:40.833091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:40.875208Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:40.931612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:40.931777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:40.943451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:41.057809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.408240Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2024-11-21T23:11:41.408427Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2024-11-21T23:11:41.411885Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} started# 5 actors each with inflight# 4 2024-11-21T23:11:41.411984Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:41.412040Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:41.412069Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:41.412097Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:41.412133Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2024-11-21T23:11:41.417069Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} session: ydb://session/3?node_id=1&id=NzdiNGM3Yy04NDBiZjM5ZS02NWFmYTFmOS1kYWJhNWE2Yw== 2024-11-21T23:11:41.417155Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} session: ydb://session/3?node_id=1&id=ODI1OGU4ODYtMjg5YjdiM2ItYjNiMWQ0NWUtNjg0MTA4ZWI= 2024-11-21T23:11:41.418686Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} session: 
ydb://session/3?node_id=1&id=YTU1NzM4NC0yYTU0OWU2OC0xZjQ0Yjg1MC1mZGM0MzFhMw== 2024-11-21T23:11:41.420145Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} session: ydb://session/3?node_id=1&id=ZDYwMjFjMjUtZDc0Njc1YS0xMTRkZTZjMi1iOThmYTlhYg== 2024-11-21T23:11:41.421637Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} session: ydb://session/3?node_id=1&id=ZjUzNjRkYWUtNjEzNDMzZGMtNDE2M2JmN2EtNzBhYmY1NWU= 2024-11-21T23:11:41.426168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:715:2599], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.426317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:741:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.426408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.426459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:743:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.426511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:744:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.426607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:745:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.426688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.434772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:11:41.664564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:41.664687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:754:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:41.664734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:755:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:41.664796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2634], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:41.664891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:757:2635], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:42.099411Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 2} finished in 1732230702.099350s, errors=0 2024-11-21T23:11:42.099805Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1732230702099 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:42.183405Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 1} finished in 1732230702.183361s, errors=0 2024-11-21T23:11:42.183750Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1732230702183 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:42.263100Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 5} finished in 1732230702.263057s, errors=0 2024-11-21T23:11:42.263346Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1732230702263 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:42.366213Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 3} finished in 1732230702.366172s, errors=0 2024-11-21T23:11:42.366571Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1732230702366 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:42.427577Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:702:2586], subTag: 4} finished in 1732230702.427531s, errors=0 2024-11-21T23:11:42.427973Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:701:2585], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1732230702427 OperationsOK: 4 OperationsError: 0 } 2024-11-21T23:11:42.428046Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 2} finished in 1.016422s, oks# 20, errors# 0 2024-11-21T23:11:42.428184Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 >> TVPatchTests::PatchPartFastXorDiffDisorder >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout >> TVPatchTests::PatchPartOk |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::FindingPartsWithTimeout [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> TVPatchTests::PatchPartOk [GOOD] >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] 
recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! 
new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! 
new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:152:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:153:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:153:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2175] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:159:2057] recipient: [11:155:2175] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:158:2176] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:147:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:150:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:151:2057] recipient: [23:149:2171] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:153:2057] recipient: [23:149:2171] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:152:2172] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:222:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:148:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:150:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:152:2057] recipient: [24:151:2171] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:154:2057] recipient: [24:151:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:153:2172] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:201:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:153:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:154:2057] recipient: [25:152:2173] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:156:2057] recipient: [25:152:2173] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:155:2174] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:225:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:150:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:153:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:154:2057] recipient: [26:152:2173] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:156:2057] recipient: [26:152:2173] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:155:2174] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:225:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:151:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:154:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:155:2057] recipient: [27:153:2173] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:157:2057] recipient: [27:153:2173] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:156:2174] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:204:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:156:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:155:2175] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:159:2057] recipient: [28:155:2175] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:158:2176] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:153:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:156:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:157:2057] recipient: [29:155:2175] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:159:2057] recipient: [29:155:2175] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:158:2176] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:228:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:154:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:156:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:158:2057] recipient: [30:157:2175] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:160:2057] recipient: [30:157:2175] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:159:2176] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:229:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:159:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:161:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:163:2057] recipient: [31:162:2180] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:165:2057] recipient: [31:162:2180] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:164:2181] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:234:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:159:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:162:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:163:2057] recipient: [32:161:2180] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:165:2057] recipient: [32:161:2180] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! 
new actor is[32:164:2181] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:234:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2024-11-21T23:11:44.081224Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:44.082588Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2024-11-21T23:11:44.082661Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2024-11-21T23:11:44.082889Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2024-11-21T23:11:44.082993Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T23:11:44.083153Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm >> TVPatchTests::PatchPartPutError >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2024-11-21T23:11:44.126355Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:44.127325Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2024-11-21T23:11:44.127382Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv 
NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T23:11:44.127542Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2024-11-21T23:11:44.127615Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2024-11-21T23:11:44.127664Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2024-11-21T23:11:44.400074Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2024-11-21T23:11:44.411706Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:734} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2024-11-21T23:11:44.411802Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2024-11-21T23:11:44.411919Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2024-11-21T23:11:44.475144Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:44.476065Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2024-11-21T23:11:44.476120Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T23:11:44.476323Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2024-11-21T23:11:44.476390Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:44.476568Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# 
[1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2024-11-21T23:11:44.476651Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2024-11-21T23:11:44.476719Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2024-11-21T23:11:44.476911Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2024-11-21T23:11:44.476960Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2024-11-21T23:11:44.477031Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TVPatchTests::PatchPartPutError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:59.498817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:59.498926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:59.498966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:59.499006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:59.499074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:59.499109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:59.499197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:59.499625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:59.578368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:59.578460Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:59.593366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:59.593761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:59.593994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:59.621392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:59.621791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:59.622531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:59.623294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:59.629076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:59.630594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:59.630661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:59.630746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:59.630797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:59.630838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:59.631229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:59.643678Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:59.779287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:59.779486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.779665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2024-11-21T23:08:59.779862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:59.779910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.781627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:59.781715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:59.781874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.781941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:59.781979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:59.782000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:59.783411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.783456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:59.783491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:59.784913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.784951Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.784993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:59.785027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:59.788885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:59.790681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:59.790919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:59.792027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:59.792168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:59.792217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:59.792473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:59.792536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:59.792716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:59.792803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:59.802792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:59.802853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:59.803071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:59.803118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:59.803615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:59.803668Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:59.803802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:59.803842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:59.803897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:59.803975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:59.804012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:59.804042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:59.804140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:59.804182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:59.804214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
emeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.502522Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T23:11:43.503623Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:2, at schemeshard: 72057594046678944 2024-11-21T23:11:43.503685Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:11:43.503993Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2024-11-21T23:11:43.504126Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2024-11-21T23:11:43.504161Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2024-11-21T23:11:43.504205Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2024-11-21T23:11:43.504906Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.504997Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.505031Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T23:11:43.507257Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.507365Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.507402Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2024-11-21T23:11:43.507440Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2024-11-21T23:11:43.507478Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T23:11:43.507573Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2024-11-21T23:11:43.509099Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2024-11-21T23:11:43.509157Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:11:43.509424Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:11:43.509554Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1003:0 progress is 3/3 2024-11-21T23:11:43.509589Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T23:11:43.509634Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2024-11-21T23:11:43.509670Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2024-11-21T23:11:43.509712Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2024-11-21T23:11:43.509747Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2024-11-21T23:11:43.509855Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:11:43.509895Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2024-11-21T23:11:43.509926Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2024-11-21T23:11:43.509964Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:11:43.509994Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2024-11-21T23:11:43.510021Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2024-11-21T23:11:43.510066Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T23:11:43.510904Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.513528Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.513624Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.513663Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.513764Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.521004Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2024-11-21T23:11:43.524769Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 340 RawX2: 489626274069 } TabletId: 72075186233409546 State: 4 2024-11-21T23:11:43.524863Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2024-11-21T23:11:43.533582Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:11:43.534170Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2024-11-21T23:11:43.534446Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2024-11-21T23:11:43.534741Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T23:11:43.535340Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:11:43.535385Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T23:11:43.535449Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T23:11:43.535495Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:11:43.535538Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:11:43.541691Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T23:11:43.541768Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2024-11-21T23:11:43.542010Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2024-11-21T23:11:43.542270Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2024-11-21T23:11:43.542313Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2024-11-21T23:11:43.543162Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2024-11-21T23:11:43.543360Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2024-11-21T23:11:43.543398Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:615:2544] 2024-11-21T23:11:43.550246Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 342 RawX2: 489626274070 } TabletId: 72075186233409547 State: 4 2024-11-21T23:11:43.550334Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2024-11-21T23:11:43.552336Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:11:43.552902Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2024-11-21T23:11:43.561694Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T23:11:43.562175Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547 2024-11-21T23:11:43.565427Z node 114 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:11:43.565488Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:11:43.565574Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:11:43.579115Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T23:11:43.579205Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2024-11-21T23:11:43.580015Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T23:11:43.580405Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T23:11:43.580489Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> StatisticsSaveLoad::Simple >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> StatisticsSaveLoad::ForbidAccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2024-11-21T23:11:38.938168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:38.941461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:38.941634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002271/r3tmp/tmpDXo1G5/pdisk_1.dat 2024-11-21T23:11:39.302678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.345465Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:39.395723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:39.395874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:39.407762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:39.531932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.965992Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T23:11:39.966248Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T23:11:40.037134Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.069706s, errors=0 2024-11-21T23:11:40.037274Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T23:11:43.468206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:43.468395Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:11:43.468572Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002271/r3tmp/tmpO1WOR5/pdisk_1.dat 2024-11-21T23:11:43.743156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.765979Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:43.811872Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:43.812005Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:43.823443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:43.938722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:44.217255Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T23:11:44.217428Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2024-11-21T23:11:44.290779Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.072965s, errors=0 2024-11-21T23:11:44.290900Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2024-11-21T23:11:45.113743Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:45.114719Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2024-11-21T23:11:45.114787Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2024-11-21T23:11:45.115015Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no 
ParityPart# no ForceEnd# no 2024-11-21T23:11:45.115095Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2024-11-21T23:11:45.115320Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2024-11-21T23:11:45.115402Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2024-11-21T23:11:45.115472Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2024-11-21T23:11:45.115670Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2024-11-21T23:11:45.115731Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2024-11-21T23:11:45.115801Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TxUsage::WriteToTopic_Demo_28 [GOOD] >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2024-11-21T23:11:38.794717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:38.797738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:38.797873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0022ad/r3tmp/tmpEgZv3J/pdisk_1.dat 2024-11-21T23:11:39.215165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.262067Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:39.316256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:39.316393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:39.328195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:39.446994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.840573Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2024-11-21T23:11:39.840717Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T23:11:39.949199Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.108063s, errors=0 2024-11-21T23:11:39.949315Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T23:11:43.661959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:43.662268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:11:43.662471Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0022ad/r3tmp/tmpI2D93g/pdisk_1.dat 2024-11-21T23:11:43.948356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.975659Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:44.022578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:44.022714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:44.034038Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:44.149884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:44.437161Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2024-11-21T23:11:44.437300Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2024-11-21T23:11:44.546031Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.108334s, errors=0 2024-11-21T23:11:44.546140Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |82.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TxUsage::WriteToTopic_Demo_11 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2024-11-21T23:11:38.858148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:38.861149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:38.861285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002259/r3tmp/tmpjjXmrl/pdisk_1.dat 2024-11-21T23:11:39.182513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.233869Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:39.285541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:39.285691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:39.297073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:39.412365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:39.736432Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2024-11-21T23:11:39.737530Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2024-11-21T23:11:39.791396Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor finished in 0.053615s, errors=0 2024-11-21T23:11:39.792011Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2024-11-21T23:11:39.792145Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:710:2594] with id# {Tag: 0, parent: [1:701:2585], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2024-11-21T23:11:39.793227Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2024-11-21T23:11:39.793357Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:713:2597] 2024-11-21T23:11:39.793437Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Bootstrap called, sample# 0 2024-11-21T23:11:39.793487Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Connect to# 72075186224037888 called 2024-11-21T23:11:39.794248Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Handle TEvClientConnected 
called, Status# OK 2024-11-21T23:11:39.802847Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} finished in 0.008536s, read# 1000 2024-11-21T23:11:39.803363Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:713:2597] with chunkSize# 0 finished: 0 { DurationMs: 8 OperationsOK: 1000 OperationsError: 0 } 2024-11-21T23:11:39.803556Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:716:2600] 2024-11-21T23:11:39.803621Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 2} Bootstrap called, sample# 0 2024-11-21T23:11:39.803649Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 2} Connect to# 72075186224037888 called 2024-11-21T23:11:39.803921Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 2} Handle TEvClientConnected called, Status# OK 2024-11-21T23:11:40.123999Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 2} finished in 0.320018s, read# 1000 2024-11-21T23:11:40.124141Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:716:2600] with chunkSize# 1 finished: 0 { DurationMs: 320 OperationsOK: 1000 OperationsError: 0 } 2024-11-21T23:11:40.124230Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:719:2603] 2024-11-21T23:11:40.124268Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 3} Bootstrap called, sample# 0 2024-11-21T23:11:40.124295Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 3} Connect to# 72075186224037888 called 2024-11-21T23:11:40.124519Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 3} Handle TEvClientConnected called, Status# OK 2024-11-21T23:11:40.147051Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 3} finished in 0.022484s, read# 1000 2024-11-21T23:11:40.147192Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:719:2603] with chunkSize# 10 finished: 0 { DurationMs: 22 OperationsOK: 1000 OperationsError: 0 } 2024-11-21T23:11:40.147314Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:722:2606] 2024-11-21T23:11:40.147368Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 4} Bootstrap called, sample# 1000 2024-11-21T23:11:40.147398Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 4} Connect to# 72075186224037888 called 2024-11-21T23:11:40.147614Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 4} Handle TEvClientConnected called, Status# OK 2024-11-21T23:11:40.150366Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 4} finished in 0.002263s, sampled# 1000, iter finished# 1, oks# 1000 2024-11-21T23:11:40.150517Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} received keyCount# 1000 2024-11-21T23:11:40.150702Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} started read actor with id# [1:725:2609] 2024-11-21T23:11:40.150762Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:710:2594], subTag: 5} Bootstrap called, will read keys# 1000 2024-11-21T23:11:40.556307Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} received point times# 1000, Inflight left# 0 2024-11-21T23:11:40.556465Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 
finished: 0 { DurationMs: 405 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 41\n" } 2024-11-21T23:11:40.556595Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:701:2585], subTag: 3} finished in 0.764306s with report: { DurationMs: 8 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 320 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 22 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 405 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 41\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2024-11-21T23:11:40.556844Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:710:2594] with tag# 3 2024-11-21T23:11:44.114292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:44.114522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:11:44.114725Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002259/r3tmp/tmpgEZeYr/pdisk_1.dat 2024-11-21T23:11:44.356559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:44.386246Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:44.433338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:44.433479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:44.445214Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:44.559354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:44.849881Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2024-11-21T23:11:44.850252Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2024-11-21T23:11:44.872618Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 1} TUpsertActor finished in 0.022049s, errors=0 2024-11-21T23:11:44.873187Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2024-11-21T23:11:44.873320Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:709:2593] with id# {Tag: 0, parent: [2:700:2584], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2024-11-21T23:11:44.874352Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2024-11-21T23:11:44.874491Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:712:2596] 2024-11-21T23:11:44.874580Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 1} Bootstrap called, sample# 0 2024-11-21T23:11:44.874636Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 1} Connect to# 72075186224037888 called 2024-11-21T23:11:44.874944Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 1} Handle 
TEvClientConnected called, Status# OK 2024-11-21T23:11:44.876265Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 1} finished in 0.001269s, read# 10 2024-11-21T23:11:44.876440Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:712:2596] with chunkSize# 0 finished: 0 { DurationMs: 1 OperationsOK: 10 OperationsError: 0 } 2024-11-21T23:11:44.876543Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:715:2599] 2024-11-21T23:11:44.876582Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 2} Bootstrap called, sample# 0 2024-11-21T23:11:44.876624Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 2} Connect to# 72075186224037888 called 2024-11-21T23:11:44.876871Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 2} Handle TEvClientConnected called, Status# OK 2024-11-21T23:11:44.878940Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 2} finished in 0.002031s, read# 10 2024-11-21T23:11:44.879033Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:715:2599] with chunkSize# 1 finished: 0 { DurationMs: 2 OperationsOK: 10 OperationsError: 0 } 2024-11-21T23:11:44.879111Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:718:2602] 2024-11-21T23:11:44.879161Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 3} Bootstrap called, sample# 0 2024-11-21T23:11:44.879188Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 3} Connect to# 72075186224037888 called 2024-11-21T23:11:44.879408Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 3} Handle TEvClientConnected called, Status# OK 2024-11-21T23:11:44.880000Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 3} finished in 0.000548s, read# 10 2024-11-21T23:11:44.880078Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:718:2602] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2024-11-21T23:11:44.880182Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:721:2605] 2024-11-21T23:11:44.880226Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 4} Bootstrap called, sample# 10 2024-11-21T23:11:44.880249Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 4} Connect to# 72075186224037888 called 2024-11-21T23:11:44.880429Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 4} Handle TEvClientConnected called, Status# OK 2024-11-21T23:11:44.880808Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:709:2593], subTag: 4} finished in 0.000309s, sampled# 10, iter finished# 1, oks# 10 2024-11-21T23:11:44.880895Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} received keyCount# 10 2024-11-21T23:11:44.881029Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} started read actor with id# [2:724:2608] 2024-11-21T23:11:44.881088Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:709:2593], subTag: 5} Bootstrap called, will read keys# 10 2024-11-21T23:11:45.198663Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} received point times# 1000, Inflight left# 0 2024-11-21T23:11:45.198898Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { 
DurationMs: 317 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 22\n" } 2024-11-21T23:11:45.199068Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:700:2584], subTag: 3} finished in 0.325584s with report: { DurationMs: 1 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 2 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 317 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 22\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2024-11-21T23:11:45.199172Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:709:2593] with tag# 3 >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId >> UpsertLoad::ShouldDropCreateTable [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> Describe::Location [GOOD] >> Describe::DescribePartitionPermissions >> StatisticsSaveLoad::Delete >> TPipeCacheTest::TestIdleRefresh >> TOlap::StoreStats [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> TResourceBroker::TestAutoTaskId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 6568, MsgBus: 5029 2024-11-21T23:11:26.130038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874148907329751:2150];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:26.130442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012f5/r3tmp/tmpe0jm2i/pdisk_1.dat 2024-11-21T23:11:26.554816Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:26.567659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:26.567746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:26.570287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6568, node 1 2024-11-21T23:11:26.721451Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:26.721481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:26.721490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:26.721621Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5029 TClient is connected to server localhost:5029 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:27.461343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:27.504940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:27.649965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:27.822111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:27.909559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:29.644591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874161792233220:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.644771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.951951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.994010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:30.031840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:30.133037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:30.249801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:30.326207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:30.400613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874166087201024:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:30.400687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:30.401002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874166087201029:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:30.405398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:30.416935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874166087201031:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:31.130341Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874148907329751:2150];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:31.130441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:31.565161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:11:32.312125Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230692346, txId: 281474976710675] shutting down 864000000000 Trying to start YDB, gRPC: 24279, MsgBus: 27781 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012f5/r3tmp/tmpCqhaab/pdisk_1.dat 2024-11-21T23:11:33.502677Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:33.524596Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:33.524691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:33.528979Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:33.549165Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24279, node 2 2024-11-21T23:11:33.638936Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:33.638969Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:33.638977Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:33.639071Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27781 TClient is connected to server localhost:27781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:34.115842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:34.125384Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:11:34.133612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:34.336630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:34.515374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:34.605178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:37.322532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874194322079914:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:37.322627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:37.402496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:37.475506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:37.526159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:37.594205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:37.649072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:37.742912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:37.840899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874194322080418:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:37.840991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:37.841322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874194322080423:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:37.845817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:37.863679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874194322080425:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:39.125488Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230699157, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 18499, MsgBus: 7211 2024-11-21T23:11:39.925960Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439874201957884405:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:39.926007Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012f5/r3tmp/tmp6yyE7U/pdisk_1.dat 2024-11-21T23:11:40.329132Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18499, node 3 2024-11-21T23:11:40.411769Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:40.411800Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:40.411810Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:40.411947Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:11:40.418001Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:40.418102Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:40.420597Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7211 TClient is connected to server localhost:7211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:40.914693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:40.946862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:40.997541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:11:41.147506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.233367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:43.606168Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874219137755298:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:43.606293Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:43.652860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.694131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.727427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.759888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.792334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.827045Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.898007Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874219137755793:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:43.898109Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874219137755798:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:43.898109Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:43.901163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:43.909461Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439874219137755800:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:11:44.936087Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439874201957884405:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:44.951460Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:45.186523Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230705219, txId: 281474976715671] shutting down 2024-11-21T23:11:45.414284Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230705450, txId: 281474976715673] shutting down >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerInstant::Test >> TxUsage::WriteToTopic_Demo_12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::StoreStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:11:29.296780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:11:29.296892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:11:29.296950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:11:29.297008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:11:29.297064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:11:29.297100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:11:29.297164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:11:29.297495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:11:29.385723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:11:29.385789Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:29.420889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:11:29.425105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:11:29.425332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:11:29.441432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:11:29.442948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:11:29.443718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:11:29.444100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:11:29.451269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:11:29.452726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:11:29.452794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:11:29.453005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:11:29.453051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:11:29.453094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:11:29.453185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:11:29.462042Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:11:29.578223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:11:29.578534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:29.578775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:11:29.579008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:11:29.579081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:29.582820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:11:29.582965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:11:29.583211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:29.583299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:11:29.583363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:11:29.583409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:11:29.586978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T23:11:29.587066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:11:29.587109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:11:29.590215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:29.590277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:29.590331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:11:29.590385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:11:29.595742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:11:29.599082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:11:29.599324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:11:29.600437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:11:29.600628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:11:29.600684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:11:29.600986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:11:29.601048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:11:29.601257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:11:29.601374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:11:29.612229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:11:29.612298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:11:29.612527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:11:29.612576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:11:29.613045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:29.613104Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:11:29.613213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:11:29.613253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:11:29.613301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:11:29.613349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:11:29.613410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:11:29.613446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:11:29.613532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:11:29.613574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:11:29.613612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:11:29.620595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:11:29.620806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:11:29.620851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:11:29.620916Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:11:29.620970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:11:29.621117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1732230700843 LastUpdateTime: 1732230700843 ImmediateTxCompleted: 11 PlannedTxCompleted: 12 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 2 RowUpdates: 1100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1270768 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 100 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 32 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } } Children { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1732230700843 LastUpdateTime: 1732230700843 ImmediateTxCompleted: 11 PlannedTxCompleted: 12 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 2 RowUpdates: 1100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1270768 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 100 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 32 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944 2024-11-21T23:11:47.014227Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:11:47.014485Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 269us result status StatusSuccess 2024-11-21T23:11:47.014866Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1732230700843 LastUpdateTime: 1732230700843 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 100 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 32 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 
NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 1270768 RowCount: 100000 IndexSize: 0 LastAccessTime: 1732230700843 LastUpdateTime: 1732230700843 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 100 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 32 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1270768 DataSize: 1270768 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows >> BootstrapperTest::LoneBootstrapper |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2024-11-21T23:11:41.079031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:41.081291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:41.081406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002225/r3tmp/tmpas03Bj/pdisk_1.dat 2024-11-21T23:11:41.447105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.486920Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:41.540497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:41.540648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:41.552750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:41.673295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:42.026884Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2024-11-21T23:11:42.027064Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T23:11:42.117106Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 2} TUpsertActor finished in 0.089521s, errors=0 2024-11-21T23:11:42.117230Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:702:2586] with tag# 2 2024-11-21T23:11:45.629613Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:45.629801Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:11:45.630037Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002225/r3tmp/tmpGh5BJX/pdisk_1.dat 2024-11-21T23:11:45.895790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:45.927766Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:45.978251Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:45.978383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:45.990004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:46.111807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:46.410168Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2024-11-21T23:11:46.410342Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2024-11-21T23:11:46.477711Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.066937s, errors=0 2024-11-21T23:11:46.477839Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 >> TPipeCacheTest::TestTabletNode [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2024-11-21T23:11:39.896496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:39.902326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:39.902555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002241/r3tmp/tmpQjtPNh/pdisk_1.dat 2024-11-21T23:11:40.300556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:40.345360Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:40.395967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:40.396135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:40.408085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:40.525490Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2024-11-21T23:11:40.714944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:611:2519], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:40.715104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:40.729567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.057041Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2024-11-21T23:11:41.058825Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2024-11-21T23:11:41.081198Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 1} TUpsertActor finished in 0.022064s, errors=0 2024-11-21T23:11:41.081520Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T23:11:41.081704Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2024-11-21T23:11:41.139143Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:607:2516], subTag: 3} TUpsertActor finished in 0.057175s, errors=0 2024-11-21T23:11:41.139229Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:718:2595] with tag# 3 2024-11-21T23:11:44.655017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:44.655195Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:11:44.655367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002241/r3tmp/tmpz8qtkJ/pdisk_1.dat 2024-11-21T23:11:44.930719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:44.961489Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:45.007606Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:45.007740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:45.019309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:45.138089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:45.430191Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2024-11-21T23:11:45.430321Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2024-11-21T23:11:45.869323Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:700:2584], subTag: 2} TUpsertActor finished in 0.438576s, errors=0 2024-11-21T23:11:45.869430Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:701:2585] with tag# 2 2024-11-21T23:11:45.874824Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2024-11-21T23:11:45.885396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:742:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:45.885533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:46.100061Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2024-11-21T23:11:46.127971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:808:2672], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:46.128147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:46.140215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:11:46.191395Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T23:11:46.415580Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2024-11-21T23:11:46.415930Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2024-11-21T23:11:46.427915Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 1} TUpsertActor finished in 0.011682s, errors=0 2024-11-21T23:11:46.428243Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2024-11-21T23:11:46.428384Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2024-11-21T23:11:46.485527Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:739:2623], subTag: 3} TUpsertActor finished in 0.056796s, errors=0 2024-11-21T23:11:46.485637Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:901:2746] with tag# 3 |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TResourceBrokerInstant::Test [GOOD] >> TxUsage::WriteToTopic_Demo_29 >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect >> BootstrapperTest::KeepExistingTablet >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::Test [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::SingleBucket [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> TPipeCacheTest::TestAutoConnect [GOOD] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes >> TTabletPipeTest::TestPipeConnectToHint >> TTabletPipeTest::TestSendAfterOpen >> 
TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:156:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:155:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:155:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:156:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:155:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:155:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:158:2057] recipient: [10:156:2176] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:160:2057] recipient: [10:156:2176] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:159:2177] Leader for TabletID 72057594037927937 is [10:159:2177] sender: [10:229:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:161:2181] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:165:2057] recipient: [11:161:2181] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:164:2182] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:234:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 6:2167] sender: [29:216:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:142:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:145:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:146:2057] recipient: [30:144:2166] Leader for TabletID 72057594037927937 is [30:147:2167] sender: [30:148:2057] recipient: [30:144:2166] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! 
new actor is[30:147:2167] Leader for TabletID 72057594037927937 is [30:147:2167] sender: [30:217:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:147:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:150:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:151:2057] recipient: [31:149:2171] Leader for TabletID 72057594037927937 is [31:152:2172] sender: [31:153:2057] recipient: [31:149:2171] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:152:2172] Leader for TabletID 72057594037927937 is [31:152:2172] sender: [31:222:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:147:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:150:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:151:2057] recipient: [32:149:2171] Leader for TabletID 72057594037927937 is [32:152:2172] sender: [32:153:2057] recipient: [32:149:2171] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:152:2172] Leader for TabletID 72057594037927937 is [32:152:2172] sender: [32:222:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:148:2057] recipient: [33:97:2132] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:151:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:152:2057] recipient: [33:150:2171] Leader for TabletID 72057594037927937 is [33:153:2172] sender: [33:154:2057] recipient: [33:150:2171] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! 
new actor is[33:153:2172] Leader for TabletID 72057594037927937 is [33:153:2172] sender: [33:223:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:153:2057] recipient: [34:97:2132] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:156:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:157:2057] recipient: [34:155:2176] Leader for TabletID 72057594037927937 is [34:158:2177] sender: [34:159:2057] recipient: [34:155:2176] !Reboot 72057594037927937 (actor [34:105:2137]) rebooted! !Reboot 72057594037927937 (actor [34:105:2137]) tablet resolver refreshed! new actor is[34:158:2177] Leader for TabletID 72057594037927937 is [34:158:2177] sender: [34:228:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:101:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:106:2057] recipient: [35:99:2133] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:139:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:153:2057] recipient: [35:97:2132] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:156:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:105:2137] sender: [35:157:2057] recipient: [35:155:2176] Leader for TabletID 72057594037927937 is [35:158:2177] sender: [35:159:2057] recipient: [35:155:2176] !Reboot 72057594037927937 (actor [35:105:2137]) rebooted! !Reboot 72057594037927937 (actor [35:105:2137]) tablet resolver refreshed! new actor is[35:158:2177] Leader for TabletID 72057594037927937 is [35:158:2177] sender: [35:228:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:101:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:106:2057] recipient: [36:99:2133] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:139:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:155:2057] recipient: [36:97:2132] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:158:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:105:2137] sender: [36:159:2057] recipient: [36:157:2177] Leader for TabletID 72057594037927937 is [36:160:2178] sender: [36:161:2057] recipient: [36:157:2177] !Reboot 72057594037927937 (actor [36:105:2137]) rebooted! !Reboot 72057594037927937 (actor [36:105:2137]) tablet resolver refreshed! 
new actor is[36:160:2178] Leader for TabletID 72057594037927937 is [36:160:2178] sender: [36:230:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:101:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:106:2057] recipient: [37:99:2133] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:139:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:160:2057] recipient: [37:97:2132] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:163:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:105:2137] sender: [37:164:2057] recipient: [37:162:2182] Leader for TabletID 72057594037927937 is [37:165:2183] sender: [37:166:2057] recipient: [37:162:2182] !Reboot 72057594037927937 (actor [37:105:2137]) rebooted! !Reboot 72057594037927937 (actor [37:105:2137]) tablet resolver refreshed! new actor is[37:165:2183] Leader for TabletID 72057594037927937 is [37:165:2183] sender: [37:235:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:101:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:106:2057] recipient: [38:99:2133] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:139:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:160:2057] recipient: [38:97:2132] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:163:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:105:2137] sender: [38:164:2057] recipient: [38:162:2182] Leader for TabletID 72057594037927937 is [38:165:2183] sender: [38:166:2057] recipient: [38:162:2182] !Reboot 72057594037927937 (actor [38:105:2137]) rebooted! !Reboot 72057594037927937 (actor [38:105:2137]) tablet resolver refreshed! new actor is[38:165:2183] Leader for TabletID 72057594037927937 is [38:165:2183] sender: [38:235:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:101:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:106:2057] recipient: [39:99:2133] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:139:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:161:2057] recipient: [39:97:2132] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:164:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:105:2137] sender: [39:165:2057] recipient: [39:163:2182] Leader for TabletID 72057594037927937 is [39:166:2183] sender: [39:167:2057] recipient: [39:163:2182] !Reboot 72057594037927937 (actor [39:105:2137]) rebooted! !Reboot 72057594037927937 (actor [39:105:2137]) tablet resolver refreshed! 
new actor is[39:166:2183] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:101:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:106:2057] recipient: [40:99:2133] Leader for TabletID 72057594037927937 is [40:105:2137] sender: [40:139:2057] recipient: [40:14:2061] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletPipeTest::TestShutdown >> TTabletPipeTest::TestSendAfterOpen [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TxUsage::WriteToTopic_Demo_38 [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TTabletResolver::TabletResolvePriority [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TTabletPipeTest::TestShutdown [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:115:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [1:117:2145] sender: [1:119:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:154:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:158:2057] recipient: [1:99:2134] Leader for TabletID 9437185 is [1:117:2145] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:163:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:165:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:193:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:196:2057] recipient: [1:98:2133] Leader for TabletID 9437184 is [1:114:2143] sender: [1:199:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:200:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:202:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:230:2057] recipient: [1:14:2061] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] >> BootstrapperTest::DuplicateNodes [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown 
[GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] >> TxUsage::WriteToTopic_Demo_39 |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestShutdown [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... waiting for pipe to connect 2024-11-21T23:11:50.124424Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.124512Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.124950Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T23:11:50.124980Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 7090319362426798975 2024-11-21T23:11:50.125069Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T23:11:50.125086Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16229357239031646724 2024-11-21T23:11:50.125716Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T23:11:50.125751Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2024-11-21T23:11:50.126000Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T23:11:50.126022Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.144212s 2024-11-21T23:11:50.282601Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.283343Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:206:2093] 2024-11-21T23:11:50.283734Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T23:11:50.283778Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] Test command err: 2024-11-21T23:11:50.938671Z node 1 :PIPE_SERVER DEBUG: [9437185] Detach 2024-11-21T23:11:50.958118Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2024-11-21T23:11:50.965659Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2024-11-21T23:11:50.970201Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:126:2152] 2024-11-21T23:11:50.970271Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:126:2152] 2024-11-21T23:11:50.970617Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:126:2152] 2024-11-21T23:11:50.970670Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:126:2152] 2024-11-21T23:11:50.970762Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:126:2152] 2024-11-21T23:11:50.970796Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:126:2152] 2024-11-21T23:11:50.970895Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:126:2152] 2024-11-21T23:11:50.971046Z node 1 :PIPE_SERVER INFO: 
[9437185] Undelivered Target# [1:126:2152] Type# 269877249 Reason# ActorUnknown 2024-11-21T23:11:50.971195Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:129:2154] 2024-11-21T23:11:50.971224Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:129:2154] 2024-11-21T23:11:50.971273Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:129:2154] 2024-11-21T23:11:50.971299Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:129:2154] 2024-11-21T23:11:50.971339Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:129:2154] 2024-11-21T23:11:50.971361Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:129:2154] 2024-11-21T23:11:50.971415Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:129:2154] 2024-11-21T23:11:50.971504Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:129:2154] Type# 269877249 Reason# ActorUnknown 2024-11-21T23:11:50.971598Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:131:2156] 2024-11-21T23:11:50.971617Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:131:2156] 2024-11-21T23:11:50.971659Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:131:2156] 2024-11-21T23:11:50.971687Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:131:2156] 2024-11-21T23:11:50.971743Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:131:2156] 2024-11-21T23:11:50.971771Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:131:2156] 2024-11-21T23:11:50.971828Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:131:2156] 2024-11-21T23:11:50.971948Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:131:2156] Type# 269877249 Reason# ActorUnknown >> TTabletPipeTest::TestSendWithoutWaitOpen >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> BootstrapperTest::MultipleBootstrappers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] Test command err: 2024-11-21T23:10:19.747864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:10:19.754286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:10:19.754475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019ea/r3tmp/tmpFqaMbi/pdisk_1.dat 2024-11-21T23:10:20.190227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:10:20.289138Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:20.360429Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:10:20.361492Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:10:20.361743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:20.361876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:20.375688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:20.522532Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Handle TEvProposeTransaction 2024-11-21T23:10:20.522599Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] TxId# 281474976715657 ProcessProposeTransaction 2024-11-21T23:10:20.522752Z node 1 :TX_PROXY DEBUG: actor# [1:52:2099] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:607:2516] 2024-11-21T23:10:20.716741Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2024-11-21T23:10:20.717407Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2024-11-21T23:10:20.717509Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:10:20.717863Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2024-11-21T23:10:20.718214Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2024-11-21T23:10:20.718329Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2024-11-21T23:10:20.718708Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 HANDLE EvClientConnected 2024-11-21T23:10:20.720220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:10:20.721181Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2024-11-21T23:10:20.721271Z node 1 :TX_PROXY DEBUG: Actor# [1:607:2516] txid# 281474976715657 SEND to# [1:558:2485] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2024-11-21T23:10:20.771006Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:10:20.771982Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:10:20.772414Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:10:20.772675Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:10:20.815592Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:10:20.816252Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:10:20.816371Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:10:20.817913Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:10:20.817988Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:10:20.818037Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:10:20.818326Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:10:20.859096Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:10:20.859277Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:10:20.859388Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:10:20.859426Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:10:20.859462Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:10:20.859495Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:10:20.859908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:20.859972Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:10:20.860486Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:10:20.860578Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:10:20.860705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:20.860741Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:20.860797Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:10:20.860858Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:10:20.860906Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:10:20.860965Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:10:20.860998Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:10:20.861047Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:10:20.861084Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:10:20.861131Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:10:20.861249Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:10:20.861299Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:10:20.861393Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:10:20.861618Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:10:20.861667Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:10:20.861763Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:10:20.861814Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:10:20.861853Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:10:20.861899Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:10:20.861945Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:20.862211Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:10:20.862248Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:10:20.862287Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:10:20.862322Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:20.862384Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:10:20.862812Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:10:20.862876Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:10:20.862993Z node 1 :TX_DATASHARD 
TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:10:20.863022Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:10:20.864473Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:10:20.864522Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:10:20.879003Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:10:20.879076Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:10:20.879113Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:10:20.879174Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: ... :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T23:11:48.511865Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:639:2542]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T23:11:48.511917Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T23:11:48.511998Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037888, for tableId 8: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T23:11:48.512092Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 8 2024-11-21T23:11:48.512193Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:642:2544]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T23:11:48.512225Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037889 2024-11-21T23:11:48.512278Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037889, for tableId 8: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T23:11:48.512338Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037889, FollowerId 0, tableId 8 2024-11-21T23:11:48.776248Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [6:788:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T23:11:48.776332Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T23:11:48.776423Z node 6 :TX_DATASHARD TRACE: No cleanup at 72075186224037890 outdated step 62000 last cleanup 0 2024-11-21T23:11:48.776487Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:11:48.776530Z node 6 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2024-11-21T23:11:48.776566Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2024-11-21T23:11:48.776605Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2024-11-21T23:11:48.776739Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient 
[6:791:2645]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T23:11:48.776789Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037891 2024-11-21T23:11:48.776868Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037891, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T23:11:48.776971Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037891, FollowerId 0, tableId 3 2024-11-21T23:11:48.777818Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [6:788:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T23:11:48.777876Z node 6 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037890 2024-11-21T23:11:48.777952Z node 6 :TX_DATASHARD DEBUG: BuildStats skipped at datashard 72075186224037890, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0 2024-11-21T23:11:48.778031Z node 6 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037890, FollowerId 0, tableId 3 2024-11-21T23:11:48.806611Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [6:791:2645]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2024-11-21T23:11:48.806725Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2024-11-21T23:11:48.806833Z node 6 :TX_DATASHARD TRACE: No cleanup at 72075186224037891 outdated step 62000 last cleanup 0 2024-11-21T23:11:48.806911Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:11:48.806948Z node 6 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037891 2024-11-21T23:11:48.806998Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037891 has no attached operations 2024-11-21T23:11:48.807046Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037891 2024-11-21T23:11:49.811125Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] Handle TEvProposeTransaction 2024-11-21T23:11:49.811210Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] TxId# 281474976715669 ProcessProposeTransaction 2024-11-21T23:11:49.811315Z node 6 :TX_PROXY DEBUG: actor# [6:52:2099] Cookie# 0 userReqId# "" txid# 281474976715669 SEND to# [6:1499:3277] DataReq marker# P0 2024-11-21T23:11:49.811538Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] Cookie# 0 txid# 281474976715669 HANDLE TDataReq marker# P1 2024-11-21T23:11:49.811824Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2024-11-21T23:11:49.812069Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2024-11-21T23:11:49.812195Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 2 followers disallowed marker# P4b 2024-11-21T23:11:49.812325Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 SEND TEvProposeTransaction to datashard 72075186224037889 with read table request affected shards 2 followers disallowed marker# P4b 2024-11-21T23:11:49.812723Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [6:1499:3277], Recipient [6:639:2542]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1499 RawX2: 
25769807053 } TxBody: " \0018\001BC\n\014\010\200\202\224\204\200\200\200\200\001\020\010\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001(\210\'0\217\247\200\200\200\200@H\001R\022\t\333\005\000\000\000\000\000\000\021\315\014\000\000\006\000\000\000" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T23:11:49.812804Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:11:49.812938Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:11:49.813192Z node 6 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:8:0] 2024-11-21T23:11:49.813300Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit CheckDataTx 2024-11-21T23:11:49.813424Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is Executed 2024-11-21T23:11:49.813481Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T23:11:49.813531Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:11:49.813581Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T23:11:49.813612Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037888 is DelayComplete 2024-11-21T23:11:49.813648Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:11:49.813699Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037888 has finished 2024-11-21T23:11:49.813783Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:11:49.813827Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037888 on unit FinishPropose 2024-11-21T23:11:49.813879Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: BAD_REQUEST 2024-11-21T23:11:49.813957Z node 6 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037888 status: BAD_REQUEST errors: SNAPSHOT_NOT_EXIST (Shard 72075186224037888 has no snapshot { table 72057594046644480:8 version 5000/281474976715663 }) | 2024-11-21T23:11:49.814063Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:11:49.814329Z node 6 :TX_PROXY DEBUG: Actor# [6:1499:3277] txid# 281474976715669 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# BAD_REQUEST shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2024-11-21T23:11:49.819737Z node 6 :TX_PROXY ERROR: Actor# [6:1499:3277] txid# 281474976715669 RESPONSE Status# WrongRequest marker# P13c 2024-11-21T23:11:49.820236Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [6:1499:3277], Recipient [6:642:2544]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1499 RawX2: 25769807053 } TxBody: " 
\0018\001BC\n\014\010\200\202\224\204\200\200\200\200\001\020\010\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001(\210\'0\217\247\200\200\200\200@H\001R\022\t\333\005\000\000\000\000\000\000\021\315\014\000\000\006\000\000\000" TxId: 281474976715669 ExecLevel: 0 Flags: 8 2024-11-21T23:11:49.820288Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:11:49.820394Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T23:11:49.820577Z node 6 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:8:0] 2024-11-21T23:11:49.820653Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037889 on unit CheckDataTx 2024-11-21T23:11:49.820726Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037889 is Executed 2024-11-21T23:11:49.820761Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037889 executing on unit CheckDataTx 2024-11-21T23:11:49.820794Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715669] at 72075186224037889 to execution unit FinishPropose 2024-11-21T23:11:49.820829Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715669] at 72075186224037889 on unit FinishPropose 2024-11-21T23:11:49.820862Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715669] at 72075186224037889 is DelayComplete 2024-11-21T23:11:49.820888Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715669] at 72075186224037889 executing on unit FinishPropose 2024-11-21T23:11:49.820918Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:281474976715669] at 72075186224037889 has finished 2024-11-21T23:11:49.820980Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T23:11:49.821019Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:281474976715669] at 72075186224037889 on unit FinishPropose 2024-11-21T23:11:49.821060Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715669 at tablet 72075186224037889 send to client, exec latency: 0 ms, propose latency: 0 ms, status: BAD_REQUEST 2024-11-21T23:11:49.821104Z node 6 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037889 status: BAD_REQUEST errors: SNAPSHOT_NOT_EXIST (Shard 72075186224037889 has no snapshot { table 72057594046644480:8 version 5000/281474976715663 }) | 2024-11-21T23:11:49.821186Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 >> TResourceBroker::TestQueueWithConfigure >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> TxUsage::WriteToTopic_Demo_8 [GOOD] >> TResourceBroker::TestRealUsage >> TResourceBroker::TestResubmitTask >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> TTabletLabeledCountersAggregator::HeavyAggregation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:99:2133] ... blocking block result NO_GROUP for [1:102:2133] ... blocking block result NO_GROUP for [1:100:2133] ... 
blocking block result NO_GROUP for [1:101:2133] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... sleeping for 2 seconds 2024-11-21T23:11:48.776521Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:48.776595Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:48.776630Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:48.779316Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T23:11:48.779383Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 7090319362426798975 2024-11-21T23:11:48.779646Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T23:11:48.779681Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16229357239031646724 2024-11-21T23:11:48.779723Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2024-11-21T23:11:48.779745Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 11763690323019591983 2024-11-21T23:11:48.780907Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T23:11:48.781050Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T23:11:48.781077Z node 3 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2024-11-21T23:11:48.781306Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T23:11:48.781338Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T23:11:48.781545Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T23:11:48.781568Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.126260s 2024-11-21T23:11:48.781632Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2024-11-21T23:11:48.781647Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.186178s 2024-11-21T23:11:48.943397Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:48.944013Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2024-11-21T23:11:48.944472Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T23:11:48.944537Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T23:11:48.997229Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:48.997862Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2024-11-21T23:11:48.998316Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T23:11:48.998349Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 3 (idx 1) in gen 2 ... disconnecting other nodes ... 
sleeping for 2 seconds (tablet expected to survive) 2024-11-21T23:11:49.597137Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2024-11-21T23:11:49.597231Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2024-11-21T23:11:49.597617Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T23:11:49.597677Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:49.597746Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T23:11:49.597771Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:49.599805Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2024-11-21T23:11:49.600443Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2024-11-21T23:11:49.601328Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T23:11:49.601373Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T23:11:49.602293Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T23:11:49.602330Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2024-11-21T23:11:50.087147Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 5 2024-11-21T23:11:50.087218Z node 3 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2024-11-21T23:11:50.087281Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T23:11:50.087315Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.087465Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2024-11-21T23:11:50.087482Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.088004Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2024-11-21T23:11:50.088135Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2024-11-21T23:11:50.088556Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T23:11:50.088588Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12604849125939852480 2024-11-21T23:11:50.088785Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T23:11:50.088801Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 5312993052334781800 2024-11-21T23:11:50.089047Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T23:11:50.089145Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2024-11-21T23:11:50.089170Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 3 (owner) 2024-11-21T23:11:50.089250Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: OWNER 2024-11-21T23:11:50.089268Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 3 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2024-11-21T23:11:50.625003Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2024-11-21T23:11:50.625091Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.625146Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335029 2024-11-21T23:11:50.625171Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.626069Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] 2024-11-21T23:11:50.626185Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:266:2094] ... disconnecting nodes 1 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 1 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2024-11-21T23:11:50.626792Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T23:11:50.626835Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 2024-11-21T23:11:50.626988Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2024-11-21T23:11:50.627020Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 ... disconnecting nodes 1 <-> 2 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2024-11-21T23:11:50.627313Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2024-11-21T23:11:50.627365Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED ... disconnecting nodes 1 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2024-11-21T23:11:50.627698Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2024-11-21T23:11:50.627738Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2024-11-21T23:11:50.627911Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 3, round 16045690984833335031 2024-11-21T23:11:50.627937Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: DISCONNECTED 2024-11-21T23:11:50.628051Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2024-11-21T23:11:50.628078Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.167359s 2024-11-21T23:11:50.629752Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2024-11-21T23:11:50.629811Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.632014Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:386:2094] 2024-11-21T23:11:50.647321Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T23:11:50.647371Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2024-11-21T23:11:50.717424Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2024-11-21T23:11:50.717986Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:386:2094] 2024-11-21T23:11:50.718296Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2024-11-21T23:11:50.718319Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 1 <-> 0 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit [GOOD] >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TTabletPipeTest::TestPipeWithVersionInfo >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> TResourceBroker::TestOverusageDifferentResources [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck >> TResourceBroker::TestRandomQueue [GOOD] >> TResourceBrokerInstant::TestErrors >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TResourceBroker::TestUpdateCookie [GOOD] >> TTabletCountersAggregator::ColumnShardCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2024-11-21T23:09:34.134070Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1732230574134030 2024-11-21T23:09:34.712839Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873667637383010:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:34.791676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:34.922671Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873664999062532:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:34.922718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029eb/r3tmp/tmpunSIje/pdisk_1.dat 2024-11-21T23:09:35.436149Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:09:35.435950Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:09:35.830532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:35.998777Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:36.043094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:09:36.113025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:36.117290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:36.117684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2024-11-21T23:09:36.117741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:36.144275Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:09:36.144425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:36.151121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7172, node 1 2024-11-21T23:09:36.590738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0029eb/r3tmp/yandexGxMm32.tmp 2024-11-21T23:09:36.590778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0029eb/r3tmp/yandexGxMm32.tmp 2024-11-21T23:09:36.590951Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0029eb/r3tmp/yandexGxMm32.tmp 2024-11-21T23:09:36.591056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:36.682654Z INFO: TTestServer started on Port 2721 GrpcPort 7172 TClient is connected to server localhost:2721 PQClient connected to localhost:7172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:37.287954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2024-11-21T23:09:39.706657Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873667637383010:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:39.706734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:39.894626Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873664999062532:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:39.894708Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:40.531854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873693407187669:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.536337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.539055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873693407187682:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.573955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2024-11-21T23:09:40.646821Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2024-11-21T23:09:40.647180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873693407187684:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2024-11-21T23:09:41.157581Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873693407187780:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:41.159414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:09:41.159447Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWU1ODU0NmYtMjQ1OGI4MzMtNjQxYTc0ZWEtZTYwODEzNTA=, ActorId: [1:7439873693407187667:2304], ActorState: ExecuteState, TraceId: 01jd8ft2999phg7ncsvx06bey4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:41.161423Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:41.211099Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873690768866651:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:41.213018Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTA5MzJjMDgtY2NiNWNiOGUtY2QxNzVmOWUtMjYyMTBlZGY=, ActorId: [2:7439873690768866602:2281], ActorState: ExecuteState, TraceId: 01jd8ft2ghcvqj0yet5dbd0djn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:41.213413Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:41.489399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:09:41.700307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:7172", true, true, 1000); 2024-11-21T23:09:42.156375Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd8ft3mz6qtkpm1krtars57j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFmZDQxY2UtZTNhN2IyZDktZDlmY2I0My1iMDY2MjA2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439873701997122782:2985] === CheckClustersList. Ok 2024-11-21T23:09:48.765257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:7172 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:09:48.965132Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPers ... 
new grpc connection 2024-11-21T23:11:48.883404Z node 13 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2024-11-21T23:11:48.884367Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Write session: send init request: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T23:11:48.885170Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2024-11-21T23:11:48.885356Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:36582 2024-11-21T23:11:48.885378Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:36582 proto=v1 topic=test-topic durationSec=0 2024-11-21T23:11:48.885388Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:11:48.887379Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2024-11-21T23:11:48.887533Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T23:11:48.887551Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T23:11:48.887567Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T23:11:48.887589Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874244104624521:2554] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T23:11:48.891442Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7439874244104624521:2554] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table Test retry state: get retry delay 2024-11-21T23:11:49.104147Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YTc4YjIxZDItOTY0Mzc0NjMtYjU2NzNjMWUtNmY0MzBlNjg=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fxzmw4njpxqc7jbrr7vp2" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2024-11-21T23:11:49.104188Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Write session will restart in 2.000000s 2024-11-21T23:11:49.104315Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Write session: Do CDS request 2024-11-21T23:11:49.104351Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Do schedule cds request after 2000 ms 2024-11-21T23:11:49.091938Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715699. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:11:49.092070Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439874244104624533:2556] TxId: 281474976715699. Ctx: { TraceId: 01jd8fxzmvc2vg6z1xkt6nqbtk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YTc4YjIxZDItOTY0Mzc0NjMtYjU2NzNjMWUtNmY0MzBlNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:11:49.092388Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YTc4YjIxZDItOTY0Mzc0NjMtYjU2NzNjMWUtNmY0MzBlNjg=, ActorId: [13:7439874244104624522:2556], ActorState: ExecuteState, TraceId: 01jd8fxzmvc2vg6z1xkt6nqbtk, Create QueryResponse for error on request, msg: 2024-11-21T23:11:49.094653Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7439874244104624521:2554] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YTc4YjIxZDItOTY0Mzc0NjMtYjU2NzNjMWUtNmY0MzBlNjg=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fxzmw4njpxqc7jbrr7vp2" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2024-11-21T23:11:49.094800Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=YTc4YjIxZDItOTY0Mzc0NjMtYjU2NzNjMWUtNmY0MzBlNjg=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8fxzmw4njpxqc7jbrr7vp2" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2024-11-21T23:11:49.095163Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD 2024-11-21T23:11:49.147150Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720679. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T23:11:49.147288Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7439874240999738929:2431] TxId: 281474976720679. Ctx: { TraceId: 01jd8fxz4ma4r9xavqb8v46a2v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=MzRmZTA1ZDQtZjk5NTE3NGYtNzYxZGE5YTctYWYwNjZjNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T23:11:49.147681Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=MzRmZTA1ZDQtZjk5NTE3NGYtNzYxZGE5YTctYWYwNjZjNGU=, ActorId: [14:7439874240999738914:2431], ActorState: ExecuteState, TraceId: 01jd8fxz4ma4r9xavqb8v46a2v, Create QueryResponse for error on request, msg: 2024-11-21T23:11:49.148599Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd8fxzpxacfgwg1azk6a7gsh" } } YdbStatus: UNAVAILABLE ConsumedRu: 384 } 2024-11-21T23:11:49.541491Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715701. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:11:49.541618Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439874248399591884:2559] TxId: 281474976715701. Ctx: { TraceId: 01jd8fy03v7gaxppj6b12tfvf0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzExMTgyZjAtYWYzNWUzMmUtNTFhNDE4NmUtNDY0MGRkM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:11:49.541942Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YzExMTgyZjAtYWYzNWUzMmUtNTFhNDE4NmUtNDY0MGRkM2Q=, ActorId: [13:7439874248399591881:2559], ActorState: ExecuteState, TraceId: 01jd8fy03v7gaxppj6b12tfvf0, Create QueryResponse for error on request, msg: 2024-11-21T23:11:49.542802Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fy03wec8md00y9k5deqnb" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2024-11-21T23:11:49.877520Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Write session: close. Timeout = 0 ms 2024-11-21T23:11:49.877579Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Write session will now close 2024-11-21T23:11:49.877637Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Write session: aborting 2024-11-21T23:11:49.878459Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T23:11:49.878511Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|b35bb73f-b517cb59-73a37f7f-557ce9c6_0] Write session: destroy 2024-11-21T23:11:49.879438Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720681. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:11:49.879533Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7439874245294706313:2441] TxId: 281474976720681. Ctx: { TraceId: 01jd8fy0dh68vv3pf2kjr3g1y9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=M2E4MGMzZTktOTM0ZDBlMDgtYjE5ZTM5YTMtYTBiMWM4NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:11:49.879803Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=M2E4MGMzZTktOTM0ZDBlMDgtYjE5ZTM5YTMtYTBiMWM4NTU=, ActorId: [14:7439874245294706310:2441], ActorState: ExecuteState, TraceId: 01jd8fy0dh68vv3pf2kjr3g1y9, Create QueryResponse for error on request, msg: 2024-11-21T23:11:49.880489Z node 14 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8fy0djfc4ba1jfy9n64ay3" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2024-11-21T23:11:50.287988Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715703. Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T23:11:50.288127Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7439874252694559253:2560] TxId: 281474976715703. Ctx: { TraceId: 01jd8fy0ap6v8r9k1gvvwfjtzp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YmVjMmFkYzMtOTQ4M2NmMTctODc0NjA3OWYtOWI3OWI4ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2024-11-21T23:11:50.288532Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=YmVjMmFkYzMtOTQ4M2NmMTctODc0NjA3OWYtOWI3OWI4ODY=, ActorId: [13:7439874248399591931:2560], ActorState: ExecuteState, TraceId: 01jd8fy0ap6v8r9k1gvvwfjtzp, Create QueryResponse for error on request, msg: 2024-11-21T23:11:50.289510Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jd8fy0v74ft6qj3y1wwkzn9b" } } YdbStatus: UNAVAILABLE ConsumedRu: 348 } >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 14810, MsgBus: 18348 2024-11-21T23:11:24.466532Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874141108278597:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:24.466615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00130f/r3tmp/tmp7RPyDI/pdisk_1.dat 2024-11-21T23:11:24.970300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:24.970541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:24.975337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:25.004340Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14810, node 1 2024-11-21T23:11:25.091769Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:25.091796Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:25.091808Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:25.091960Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18348 TClient is connected to server localhost:18348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:11:25.805928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:25.839886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:25.962691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:26.122424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:26.218977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:28.115536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874158288149296:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:28.115644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:28.391231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:28.431731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:28.472963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:28.515314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:28.586733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:28.627781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:28.732682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874158288149800:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:28.732771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:28.733083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874158288149805:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:28.737014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:28.747873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874158288149807:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:29.473313Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874141108278597:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:29.473490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:30.025978Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230690057, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 6017, MsgBus: 21123 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00130f/r3tmp/tmpQTkAcR/pdisk_1.dat 2024-11-21T23:11:31.110886Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:31.198274Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:31.213860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:31.214256Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:31.215409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6017, node 2 2024-11-21T23:11:31.327045Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:31.327069Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:31.327078Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:31.327183Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21123 TClient is connected to server localhost:21123 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:31.862470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:31.871861Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:31.899990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:11:31.998027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:11:32.179488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:11:32.250113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:34.728776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874183168991806:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 202 ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:41.508077Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874212497997610:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.508224Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.546495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.581705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.619486Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.697523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.733503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.781941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.846426Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874212497998106:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.846559Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.846939Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874212497998111:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.852135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:41.867587Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439874212497998113:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:42.925888Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439874195318126796:2120];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:42.925960Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:43.990761Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230703588, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 31710, MsgBus: 10377 2024-11-21T23:11:44.803521Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439874224361901371:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00130f/r3tmp/tmpIVRQob/pdisk_1.dat 2024-11-21T23:11:44.860120Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:11:44.933573Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:44.965951Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:44.966044Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:44.967708Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31710, node 4 2024-11-21T23:11:45.035739Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:45.035776Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:45.035786Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:45.035939Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10377 TClient is connected to server localhost:10377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:45.627174Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:45.638794Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:11:45.665910Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:45.743497Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:45.943013Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:46.020415Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:48.550408Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874241541772093:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:48.550498Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:48.598349Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:48.678368Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:48.735713Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:48.785924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:48.834026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:48.905179Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:49.010586Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874245836739897:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:49.010681Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:49.010817Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874245836739902:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:49.014521Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:49.032292Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439874245836739904:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:49.801868Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439874224361901371:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:49.801950Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:51.284192Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230710630, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2024-11-21T23:11:52.334238Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2024-11-21T23:11:52.334499Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' is required" 2024-11-21T23:11:52.334696Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" >> TResourceBroker::TestOverusage >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2024-11-21T23:11:52.873039Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-4 (4 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873163Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-10 (10 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873252Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-16 (16 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873299Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-18 (18 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873334Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-20 (20 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873395Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-24 (24 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873494Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-30 (30 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873565Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-32 (32 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873656Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-38 (38 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873708Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-41 (41 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873807Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-45 (45 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873870Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-47 (47 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.873975Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-51 (51 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874050Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-54 
(54 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874159Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-59 (59 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874212Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-61 (61 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874283Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-64 (64 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874458Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-70 (70 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874525Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-71 (71 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874637Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-76 (76 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874764Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-82 (82 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874801Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-83 (83 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.874874Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-86 (86 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.875001Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-92 (92 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.875113Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-97 (97 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.875198Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-100 (100 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.875568Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-118 (118 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.875714Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-124 (124 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.875759Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-125 (125 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.875816Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-126 (126 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.875988Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-134 (134 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.876148Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-137 (137 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.876357Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-148 (148 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.876439Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-150 (150 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.876694Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-161 (161 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.876755Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-163 (163 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.876826Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-165 (165 by 
[2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.876986Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-172 (172 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877041Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-174 (174 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877130Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-178 (178 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877170Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-179 (179 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877224Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-181 (181 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877290Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-184 (184 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877325Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-185 (185 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877379Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-187 (187 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877429Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-189 (189 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877461Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-190 (190 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877554Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-193 (193 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877751Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-201 (201 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877787Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-202 (202 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877865Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-204 (204 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.877972Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-208 (208 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878052Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-211 (211 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878251Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-222 (222 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878312Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-226 (226 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878334Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-227 (227 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878383Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-231 (231 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878466Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-234 (234 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878493Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-235 (235 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878567Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 
'task-241 (241 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878627Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-245 (245 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878684Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-247 (247 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878739Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-250 (250 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878777Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-251 (251 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878814Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-252 (252 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878882Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-257 (257 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878919Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-258 (258 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.878952Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-260 (260 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879089Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-269 (269 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879140Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-271 (271 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879328Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-286 (286 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879358Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-287 (287 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879408Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-291 (291 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879429Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-292 (292 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879565Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-298 (298 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879682Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-308 (308 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879704Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-309 (309 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.879767Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-313 (313 by [2:97:2132])' of ... 
T23:11:52.915246Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-505 (505 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915284Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-506 (506 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915321Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-509 (509 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915381Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-536 (536 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915404Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-556 (556 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915456Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-572 (572 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915492Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-582 (582 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915533Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-616 (616 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915574Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-618 (618 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915600Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-620 (620 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915650Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-662 (662 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915676Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-665 (665 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915710Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-672 (672 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915732Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-676 (676 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915759Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-693 (693 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915846Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-741 (741 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915885Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-781 (781 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915925Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-813 (813 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.915976Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-827 (827 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916016Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-846 (846 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916099Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-857 (857 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916128Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-873 (873 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916151Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-898 (898 by [2:97:2132])' of unknown type 'wrong' to default queue 
2024-11-21T23:11:52.916172Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-902 (902 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916224Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-916 (916 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916264Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-937 (937 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916304Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-939 (939 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916362Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-970 (970 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916449Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-64 (64 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916554Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-148 (148 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916587Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-150 (150 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916644Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-178 (178 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916671Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-181 (181 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916693Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-187 (187 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916716Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-189 (189 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916814Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-234 (234 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916852Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-235 (235 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916901Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-251 (251 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916935Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-260 (260 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.916979Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-298 (298 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917057Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-373 (373 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917083Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-388 (388 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917110Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-398 (398 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917161Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-409 (409 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917187Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-411 (411 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917224Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-437 (437 by [2:97:2132])' of unknown type 'wrong' to default queue 
2024-11-21T23:11:52.917248Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-457 (457 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917271Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-464 (464 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917304Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-484 (484 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917323Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-489 (489 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917375Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-502 (502 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917409Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-521 (521 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917445Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-537 (537 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917515Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-608 (608 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917552Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-623 (623 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917595Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-639 (639 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917635Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-673 (673 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917684Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-686 (686 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917709Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-694 (694 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917731Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-718 (718 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917757Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-732 (732 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917785Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-739 (739 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917830Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-763 (763 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917885Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-821 (821 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917923Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-825 (825 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.917969Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-844 (844 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918013Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-869 (869 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918054Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-880 (880 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918079Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-893 (893 by [2:97:2132])' of unknown type 'wrong' to default queue 
2024-11-21T23:11:52.918105Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-914 (914 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918145Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-917 (917 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918195Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-924 (924 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918251Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-932 (932 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918291Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-948 (948 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918313Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-949 (949 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918359Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-971 (971 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918425Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-973 (973 by [2:97:2132])' of unknown type 'wrong' to default queue 2024-11-21T23:11:52.918459Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-979 (979 by [2:97:2132])' of unknown type 'wrong' to default queue >> TResourceBrokerInstant::TestErrors [GOOD] >> TResourceBrokerInstant::TestMerge |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TResourceBroker::TestCounters >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> TxUsage::WriteToTopic_Demo_9 >> TResourceBrokerInstant::TestMerge [GOOD] >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation >> TxUsage::WriteToTopic_Demo_23_RestartNo >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> BootstrapperTest::RestartUnavailableTablet >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> TResourceBroker::TestChangeTaskType [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge [GOOD] Test command err: 2024-11-21T23:11:53.475713Z node 1 :RESOURCE_BROKER ERROR: FinishTaskInstant failed for task 2: cannot finish unknown task >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::ReadWithRestarts >> TTabletPipeTest::TestConnectReject >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer >> TTabletResolver::NodeProblem >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset >> TTabletPipeTest::TestSendAfterReboot >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 5317, MsgBus: 1643 2024-11-21T23:11:23.872982Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874133919918529:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:23.877292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001314/r3tmp/tmpwt1xYs/pdisk_1.dat 2024-11-21T23:11:24.277235Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5317, node 1 2024-11-21T23:11:24.285768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:24.285883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:24.288635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:24.362948Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:24.362973Z node 1 :NET_CLASSIFIER WARN: will 
try to initialize from file: (empty maybe) 2024-11-21T23:11:24.362984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:24.363066Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1643 TClient is connected to server localhost:1643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:24.932969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:24.965745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:25.097150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:25.272738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:25.369808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:26.934313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874146804822013:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:26.934425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:27.196528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:27.225028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:27.304954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:27.357752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:27.401740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:27.454981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:27.496063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874151099789813:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:27.496226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874151099789808:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:27.496283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:27.499008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:27.509265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874151099789815:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:29.155890Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874133919918529:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:29.156153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:29.448306Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230688888, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 27731, MsgBus: 2534 2024-11-21T23:11:30.373816Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874163244868199:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:30.373871Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001314/r3tmp/tmpBkjmk2/pdisk_1.dat 2024-11-21T23:11:30.510186Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:30.523230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:30.523305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:30.524810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27731, node 2 2024-11-21T23:11:30.678620Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:30.678650Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:30.678662Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:30.678799Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2534 TClient is connected to server localhost:2534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:31.124691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:31.130962Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:11:31.142655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:31.241793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:31.412757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:31.485974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:33.912251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:74 ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:42.471897Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874217047291127:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:42.472011Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:42.519580Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:42.554335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:42.590535Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:42.629530Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:42.665494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:42.727681Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:42.788936Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874217047291621:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:42.789085Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:42.790509Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874217047291626:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:42.794474Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:42.803437Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439874217047291629:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:11:43.379052Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439874199867420244:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:43.379142Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:46.800786Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230704827, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 28891, MsgBus: 18004 2024-11-21T23:11:47.582150Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439874236806152013:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:47.582207Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001314/r3tmp/tmp7BTIO7/pdisk_1.dat 2024-11-21T23:11:47.709836Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:47.737600Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:47.737698Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:47.739638Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28891, node 4 2024-11-21T23:11:47.805446Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:47.805470Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:47.805482Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:47.805601Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18004 TClient is connected to server localhost:18004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:48.426178Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:48.433111Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:11:48.443621Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:48.527142Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:48.705054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:11:48.774541Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.285142Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874253986022898:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.285253Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.343067Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.386004Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.471766Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.555501Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.635480Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.680684Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.740851Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874253986023401:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.741007Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.741294Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874253986023406:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.745949Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:51.767598Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439874253986023408:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:52.603113Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439874236806152013:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:52.603188Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:53.863790Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230713528, txId: 281474976710671] shutting down |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestInterconnectSession >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> TTabletResolver::NodeProblem [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 20877, MsgBus: 17647 2024-11-21T23:11:25.447321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874145158391047:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:25.447418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001307/r3tmp/tmpZq9MrB/pdisk_1.dat 2024-11-21T23:11:25.880705Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20877, node 1 2024-11-21T23:11:25.931376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:25.931658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:25.935658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:25.954817Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:25.954859Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:25.954868Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:25.955358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17647 TClient is connected to server localhost:17647 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:26.485773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:11:26.507437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:11:26.649478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:11:26.823337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:26.890622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:11:28.624731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874158043294630:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:28.624894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:28.981120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.073194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.131928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.177075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.257886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.337659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.457509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874162338262435:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.457617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.460449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874162338262440:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.466601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:29.481313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874162338262442:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:30.452018Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874145158391047:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:30.452090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:30.593908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:11:31.360600Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230691394, txId: 281474976710675] shutting down 864000000000 Trying to start YDB, gRPC: 26467, MsgBus: 62176 2024-11-21T23:11:32.027604Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874174299651260:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:32.027669Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001307/r3tmp/tmp1BsNSk/pdisk_1.dat 2024-11-21T23:11:32.175019Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:32.187251Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:32.187329Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:32.191025Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26467, node 2 2024-11-21T23:11:32.264695Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:32.264719Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:32.264733Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:32.264855Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62176 TClient is connected to server localhost:62176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:11:32.799828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:32.806574Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:11:32.828148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:32.894384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:33.083759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:33.187953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose its ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:42.906791Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874217740377028:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:42.906880Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:42.958716Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:42.994960Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.050199Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.096330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.142811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.210108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:43.256809Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874222035344817:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:43.256887Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:43.257133Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874222035344823:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:43.261062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:43.272155Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439874222035344825:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:11:44.301970Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439874204855473595:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:44.302034Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:46.765550Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230705212, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 19855, MsgBus: 20444 2024-11-21T23:11:47.573451Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439874237385815604:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:47.573548Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001307/r3tmp/tmpCfURcu/pdisk_1.dat 2024-11-21T23:11:47.689349Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:47.703712Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:47.703838Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:47.705261Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19855, node 4 2024-11-21T23:11:47.770158Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:47.770191Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:47.770207Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:47.770352Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20444 TClient is connected to server localhost:20444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:48.309022Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:48.318157Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:11:48.330537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:48.428854Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:11:48.659308Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:48.778356Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.535828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874254565686499:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.535954Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.602305Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.640997Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.707229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.781507Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.826368Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.864100Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:51.970299Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874254565687001:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.970541Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.970896Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874254565687007:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.978369Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:51.993647Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439874254565687009:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:11:52.585278Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439874237385815604:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:52.585344Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:54.262252Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230713836, txId: 281474976715671] shutting down >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletPipeTest::TestOpen >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> BootstrapperTest::UnavailableStateStorage [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2024-11-21T23:11:55.703051Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.703299Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:201:2134] CurrentLeaderTablet: [1:202:2135] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T23:11:55.703339Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T23:11:55.703392Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:201:2134] 2024-11-21T23:11:55.703612Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.703853Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [1:207:2138] CurrentLeaderTablet: [1:208:2139] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T23:11:55.703905Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T23:11:55.703954Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2138] 2024-11-21T23:11:55.705231Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.705293Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:201:2134] 2024-11-21T23:11:55.705479Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.705528Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: 
[1:207:2138] 2024-11-21T23:11:55.705741Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 2024-11-21T23:11:55.705797Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:201:2134] by NodeId 2024-11-21T23:11:55.705860Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.706053Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [2:217:2092] CurrentLeaderTablet: [2:218:2093] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T23:11:55.706105Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T23:11:55.706151Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:217:2092] 2024-11-21T23:11:55.706418Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [1:207:2138] by NodeId 2024-11-21T23:11:55.706498Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.706721Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:223:2094] CurrentLeaderTablet: [2:224:2095] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T23:11:55.706775Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T23:11:55.706833Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T23:11:55.708471Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2024-11-21T23:11:55.708543Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.708590Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:217:2092] 2024-11-21T23:11:55.708826Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.708873Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T23:11:55.709103Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2024-11-21T23:11:55.709152Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:217:2092] by NodeId 2024-11-21T23:11:55.709212Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.709451Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 
Cookie: 0 CurrentLeader: [3:235:2092] CurrentLeaderTablet: [3:236:2093] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T23:11:55.709499Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2024-11-21T23:11:55.709547Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:235:2092] 2024-11-21T23:11:55.709824Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.709894Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2024-11-21T23:11:55.710125Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2024-11-21T23:11:55.710176Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.710223Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:235:2092] 2024-11-21T23:11:55.710502Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:223:2094] by NodeId 2024-11-21T23:11:55.710576Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2024-11-21T23:11:55.710826Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:241:2094] CurrentLeaderTablet: [3:242:2095] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2024-11-21T23:11:55.710869Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2024-11-21T23:11:55.710912Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] >> TTabletPipeTest::TestTwoNodes >> TTabletPipeTest::TestInterconnectSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:155:2058] recipient: [1:153:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:155:2058] recipient: [1:153:2135] Leader for TabletID 9437184 is [1:161:2139] sender: [1:162:2058] recipient: [1:153:2135] Leader for TabletID 9437185 is [0:0:0] sender: [2:165:2049] recipient: [2:156:2093] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:165:2049] recipient: [2:156:2093] Leader for TabletID 9437185 is [2:177:2096] sender: [2:180:2049] recipient: [2:156:2093] Leader for TabletID 9437184 is [1:161:2139] sender: [1:205:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [1:207:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [2:209:2049] recipient: [2:41:2053] Leader for TabletID 9437185 is [2:177:2096] sender: [2:210:2049] recipient: [2:150:2092] Leader for TabletID 9437185 is [2:177:2096] sender: [1:213:2058] 
recipient: [1:15:2062] Leader for TabletID 9437185 is [2:177:2096] sender: [2:215:2049] recipient: [2:41:2053] Leader for TabletID 9437185 is [2:177:2096] sender: [2:216:2049] recipient: [2:214:2109] Leader for TabletID 9437185 is [2:217:2110] sender: [2:218:2049] recipient: [2:214:2109] Leader for TabletID 9437185 is [2:217:2110] sender: [1:247:2058] recipient: [1:15:2062] >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> KqpRm::ManyTasks >> TTabletPipeTest::TestOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:106:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:107:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:115:2057] recipient: [1:102:2135] Leader for TabletID 9437185 is [1:117:2145] sender: [1:119:2057] recipient: [1:103:2136] Leader for TabletID 9437184 is [1:114:2143] sender: [1:154:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:158:2057] recipient: [1:99:2134] Leader for TabletID 9437185 is [1:117:2145] sender: [1:161:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:117:2145] sender: [1:163:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:165:2057] recipient: [1:162:2174] Leader for TabletID 9437185 is [1:164:2175] sender: [1:193:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:196:2057] recipient: [1:98:2133] Leader for TabletID 9437184 is [1:114:2143] sender: [1:199:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:114:2143] sender: [1:200:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:202:2057] recipient: [1:198:2198] Leader for TabletID 9437184 is [1:201:2199] sender: [1:230:2057] recipient: [1:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: 2024-11-21T23:11:56.184114Z node 3 :PIPE_SERVER ERROR: [9437185] NodeDisconnected NodeId# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2024-11-21T23:11:55.893894Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... 
blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2024-11-21T23:11:55.894539Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2024-11-21T23:11:55.894590Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.170556s 2024-11-21T23:11:56.026558Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... waiting for multiple state storage lookup attempts (done) |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> KqpRm::NodesMembershipByExchanger >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> KqpRm::SingleTask |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] >> TxUsage::WriteToTopic_Demo_12 [GOOD] >> TTabletPipeTest::TestTwoNodes [GOOD] >> KqpRm::Reduce >> KqpRm::SnapshotSharingByExchanger >> StatisticsSaveLoad::Simple [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:217:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:144:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:144:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:152:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:153:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:153:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:158:2178] Leader for TabletID 72057594037927937 is [11:161:2179] sender: [11:162:2057] recipient: [11:158:2178] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:161:2179] Leader for TabletID 72057594037927937 is [11:161:2179] sender: [11:231:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:141:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:145:2057] recipient: [17:143:2166] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:147:2057] recipient: [17:143:2166] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:146:2167] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:216:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:142:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:145:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:146:2057] recipient: [18:144:2166] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:148:2057] recipient: [18:144:2166] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! 
new actor is[18:147:2167] Leader for TabletID 72057594037927937 is [18:147:2167] sender: [18:217:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:147:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:150:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:151:2057] recipient: [19:149:2171] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:153:2057] recipient: [19:149:2171] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:152:2172] Leader for TabletID 72057594037927937 is [19:152:2172] sender: [19:222:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:147:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:150:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:151:2057] recipient: [20:149:2171] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:153:2057] recipient: [20:149:2171] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:152:2172] Leader for TabletID 72057594037927937 is [20:152:2172] sender: [20:222:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:148:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:151:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:152:2057] recipient: [21:150:2171] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:154:2057] recipient: [21:150:2171] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! 
new actor is[21:153:2172] Leader for TabletID 72057594037927937 is [21:153:2172] sender: [21:201:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:150:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:153:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:154:2057] recipient: [22:152:2173] Leader for TabletID 72057594037927937 is [22:155:2174] sender: [22:156:2057] recipient: [22:152:2173] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:155:2174] Leader for TabletID 72057594037927937 is [22:155:2174] sender: [22:225:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:150:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:152:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:154:2057] recipient: [23:153:2173] Leader for TabletID 72057594037927937 is [23:155:2174] sender: [23:156:2057] recipient: [23:153:2173] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:155:2174] Leader for TabletID 72057594037927937 is [23:155:2174] sender: [23:225:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:151:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:154:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:155:2057] recipient: [24:153:2173] Leader for TabletID 72057594037927937 is [24:156:2174] sender: [24:157:2057] recipient: [24:153:2173] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! 
new actor is[24:156:2174] Leader for TabletID 72057594037927937 is [24:156:2174] sender: [24:226:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:156:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:159:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:160:2057] recipient: [25:158:2178] Leader for TabletID 72057594037927937 is [25:161:2179] sender: [25:162:2057] recipient: [25:158:2178] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:161:2179] Leader for TabletID 72057594037927937 is [25:161:2179] sender: [25:231:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:156:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:159:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:160:2057] recipient: [26:158:2178] Leader for TabletID 72057594037927937 is [26:161:2179] sender: [26:162:2057] recipient: [26:158:2178] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! 
new actor is[26:161:2179] Leader for TabletID 72057594037927937 is [26:161:2179] sender: [26:231:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] >> Describe::DescribePartitionPermissions [GOOD] >> LocalPartition::Basic >> TxUsage::WriteToTopic_Demo_13 >> KqpRm::ManyTasks [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] >> TPQCachingProxyTest::TestWrongSessionOrGeneration |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> KqpRm::SingleTask [GOOD] >> KqpRm::Reduce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2024-11-21T23:11:57.555148Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:57.555673Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001ce7/r3tmp/tmpIR3cJF/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:57.557573Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001ce7/r3tmp/tmpIR3cJF/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001ce7/r3tmp/tmpIR3cJF/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9240773667570310423 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:57.602299Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:57.602599Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:57.618616Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:57.618742Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:57.618938Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:57.619022Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:57.619179Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:57.619212Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:57.619245Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:57.619264Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T23:11:57.619459Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:57.638156Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230717 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:57.638360Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:57.638449Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230717 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:57.638831Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:57.638977Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:57.639097Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:57.639133Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:57.639227Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230717 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:57.639418Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:57.639454Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:57.639532Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230717 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:57.640082Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:57.640155Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:57.640433Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:57.640662Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:57.640859Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:57.640997Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:57.641122Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:57.641217Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:57.643551Z node 1 :RESOURCE_BROKER DEBUG: Submitted 
new kqp_query task kqp-1-1-1 (1 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.643614Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.643679Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.643719Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.643764Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:448:2330])) 2024-11-21T23:11:57.643964Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.644164Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.644191Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.644219Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.644248Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-2 (2 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.644272Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:448:2330])) 2024-11-21T23:11:57.644294Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.644386Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-3-3 (3 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.644425Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-3-3 (3 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.644449Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.644481Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-3-3 (3 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.644509Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:448:2330])) 2024-11-21T23:11:57.644528Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 3. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.644620Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-4-4 (4 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.644641Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-4-4 (4 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.644662Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.644695Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-4-4 (4 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.644735Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:448:2330])) 2024-11-21T23:11:57.644761Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.644867Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-5-5 (5 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.644902Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-5-5 (5 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.644938Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.644961Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-5-5 (5 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.644985Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:448:2330])) 2024-11-21T23:11:57.645008Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 5. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.645097Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-6-6 (6 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.645117Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-6-6 (6 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.645139Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.645159Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-6-6 (6 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.645181Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:448:2330])) 2024-11-21T23:11:57.645214Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 6. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.645301Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-7-7 (7 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.645339Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-7-7 (7 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.645378Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.645410Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-7-7 (7 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.645445Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:448:2330])) 2024-11-21T23:11:57.645467Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.645553Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-8-8 (8 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.645576Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-8-8 (8 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.645598Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.645618Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-8-8 (8 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.645644Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:448:2330])) 2024-11-21T23:11:57.645664Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 8. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.645762Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-9-9 (9 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:57.645787Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-9-9 (9 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.645813Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:57.645836Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-9-9 (9 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:57.645858Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:448:2330])) 2024-11-21T23:11:57.645879Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:57.645980Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:448:2330]) (release resources {0, 100}) 2024-11-21T23:11:57.646020Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:448:2330])) 2024-11-21T23:11:57.646059Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. 
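
The KqpRm::ManyTasks trace above shows the broker-side bookkeeping: nine kqp_query tasks (kqp-1-1-1 through kqp-1-9-9), each requesting {0, 100}, raise the planned usage of queue_kqp_resource_manager in 0.25 steps from 0.000000 to 2.250000, and finishing kqp-1-1-1 subtracts the same share back down to 2.000000. The sketch below only replays that arithmetic; the 0.25-per-task step is read off the trace (the real TResourceBroker derives it from its queue configuration), so treat it as an illustration, not YDB code.

```cpp
// Minimal sketch, not YDB source: replay the planned-usage numbers reported by
// RESOURCE_BROKER in the KqpRm::ManyTasks trace above. The 0.25 increment per
// {0, 100} allocation is taken from the log, not from the broker's config.
#include <cstdio>

int main() {
    const double stepPerTask = 0.25;  // observed step for one {0, 100} kqp_query task
    double planned = 0.0;

    // Submit kqp-1-1-1 .. kqp-1-9-9, each allocating {0, 100}.
    for (int taskId = 1; taskId <= 9; ++taskId) {
        double before = planned;
        planned += stepPerTask;
        std::printf("insert task kqp-1-%d-%d: %.6f -> %.6f\n",
                    taskId, taskId, before, planned);
    }

    // Finish task kqp-1-1-1: the broker removes exactly the share it added.
    planned -= stepPerTask;
    std::printf("remove task kqp-1-1-1: planned usage now %.6f\n", planned);  // 2.000000
    return 0;
}
```
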
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:102:2057] recipient: [1:100:2134] Leader for TabletID 9437184 is [1:106:2138] sender: [1:107:2057] recipient: [1:100:2134] Leader for TabletID 9437184 is [1:106:2138] sender: [1:126:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [1:162:2057] recipient: [1:160:2167] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:162:2057] recipient: [1:160:2167] Leader for TabletID 9437185 is [1:166:2171] sender: [1:167:2057] recipient: [1:160:2167] Leader for TabletID 9437185 is [1:166:2171] sender: [1:202:2057] recipient: [1:14:2061] >> TPQCachingProxyTest::TestPublishAndForget >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> TPQCachingProxyTest::TestDeregister >> StatisticsSaveLoad::Delete [GOOD] >> TPQCachingProxyTest::OutdatedSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: 2024-11-21T23:11:48.777951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:48.778199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:48.778301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000ea2/r3tmp/tmpqAOCEW/pdisk_1.dat 2024-11-21T23:11:49.193499Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24817, node 1 2024-11-21T23:11:49.440555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:49.440629Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:49.440669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:49.441190Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:11:49.477222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:49.574043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:49.574198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:49.597367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17015 2024-11-21T23:11:50.238818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:53.613926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:53.614072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:53.660085Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:11:53.665313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:53.868181Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:53.928335Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:11:53.928426Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:11:53.965213Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:11:53.966258Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:11:53.966515Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:11:53.966575Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:11:53.966652Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:11:53.966718Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:11:53.966774Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:11:53.966831Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:11:53.967219Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:11:54.210995Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:11:54.211122Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:11:54.216923Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T23:11:54.226463Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T23:11:54.226835Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T23:11:54.230272Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T23:11:54.254456Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:11:54.254529Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:11:54.254628Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T23:11:54.270636Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:54.270746Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:54.276299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:11:54.285729Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:11:54.285881Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:11:54.301262Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:11:54.321255Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:54.349353Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:11:54.721890Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:11:54.918891Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:11:55.727494Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:11:55.727984Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T23:11:55.739784Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T23:11:55.744310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2160:3035], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:55.744426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2176:3040], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:55.744521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:55.752237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037889 2024-11-21T23:11:55.793217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2180:3043], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:11:56.174748Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2286:3083]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:11:56.175002Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:11:56.175097Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2288:3085] 2024-11-21T23:11:56.175171Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2288:3085] 2024-11-21T23:11:56.175684Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2289:2797] 2024-11-21T23:11:56.175928Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2288:3085], server id = [2:2289:2797], tablet id = 72075186224037897, status = OK 2024-11-21T23:11:56.176093Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2289:2797], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T23:11:56.176152Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T23:11:56.176350Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:11:56.176417Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2286:3083], StatRequests.size() = 1 2024-11-21T23:11:56.354440Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=OThiYzEzNWYtOTM2NmIyNS0yNjg3YmFjZC0xOTY5YmVjMg==, TxId: 2024-11-21T23:11:56.354532Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=OThiYzEzNWYtOTM2NmIyNS0yNjg3YmFjZC0xOTY5YmVjMg==, TxId: 2024-11-21T23:11:56.355565Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T23:11:56.358351Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2024-11-21T23:11:56.378970Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2317:3106]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:11:56.379171Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:11:56.379222Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2317:3106], StatRequests.size() = 1 2024-11-21T23:11:56.503647Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZTIyMzlmNDEtZDU2YWJhODQtZTE3N2VmMDItMjExMDExNTA=, TxId: 01jd8fy722b46w4vgxnwjbxztk 2024-11-21T23:11:56.503832Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZTIyMzlmNDEtZDU2YWJhODQtZTE3N2VmMDItMjExMDExNTA=, TxId: 01jd8fy722b46w4vgxnwjbxztk 2024-11-21T23:11:56.507171Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T23:11:56.510044Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2024-11-21T23:11:56.529297Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MTJhMzA5Ni05MmE4NTY1YS02MDJjM2ExOS1lM2E3NDFjMg==, TxId: 01jd8fy72zefttyjbv4hsyqfvf 2024-11-21T23:11:56.529429Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MTJhMzA5Ni05MmE4NTY1YS02MDJjM2ExOS1lM2E3NDFjMg==, TxId: 01jd8fy72zefttyjbv4hsyqfvf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2024-11-21T23:11:40.735299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:40.741815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:40.741991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002230/r3tmp/tmpdpMxGw/pdisk_1.dat 2024-11-21T23:11:41.145038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.194326Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:41.245096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:41.245235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:41.256883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:41.377768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:41.736361Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2024-11-21T23:11:41.746230Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2024-11-21T23:11:41.775442Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:701:2585], subTag: 1} TUpsertActor finished in 0.028750s, errors=0 2024-11-21T23:11:41.775803Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2024-11-21T23:11:41.775924Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2024-11-21T23:11:41.777078Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2024-11-21T23:11:41.777252Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} started fullscan actor# [1:713:2597] 2024-11-21T23:11:41.777355Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Bootstrap called, sample# 100 2024-11-21T23:11:41.777396Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Connect to# 72075186224037888 called 2024-11-21T23:11:41.778310Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} Handle TEvClientConnected called, Status# OK 
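
For ReadLoad::ShouldReadKqpMoreThanRows, the trace above shows the shape of the load: a warmup upsert of 100 rows, then kReadKqpStart with RowCount: 100 and Inflights: 10; the lines that follow show the parent TKqpSelectActorMultiSession sampling 100 keys and starting ten TKqpSelectActor children (subTag 2 through 11), one session each. Each child later reports OperationsOK: 100, and the parent sums them to oks# 1000 with a wall time just above the slowest child. The sketch below only illustrates that aggregation, using the per-child DurationMs values reported later in this same trace; it is not the load-test code itself.

```cpp
// Minimal sketch, not YDB source: aggregate the per-child summaries that
// TKqpSelectActorMultiSession receives in the ReadLoad trace. The DurationMs and
// OperationsOK values are copied from the child reports later in this run.
#include <algorithm>
#include <cstdio>
#include <vector>

struct ChildResult {
    int tag;
    int durationMs;
    int operationsOk;
    int operationsError;
};

int main() {
    std::vector<ChildResult> children = {
        {3, 678, 100, 0},  {7, 1046, 100, 0}, {10, 1441, 100, 0}, {11, 1974, 100, 0},
        {5, 2577, 100, 0}, {6, 3217, 100, 0}, {4, 3795, 100, 0},  {9, 4370, 100, 0},
        {2, 5069, 100, 0}, {8, 5848, 100, 0},
    };

    int oks = 0, errors = 0, slowestMs = 0;
    for (const auto& c : children) {
        oks += c.operationsOk;
        errors += c.operationsError;
        slowestMs = std::max(slowestMs, c.durationMs);
    }
    // The parent reports oks# 1000, errors# 0 and a wall time (5.863s in this run)
    // slightly above the slowest child, since the children run concurrently.
    std::printf("oks# %d, errors# %d, slowest child %d ms\n", oks, errors, slowestMs);
    return 0;
}
```
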
2024-11-21T23:11:41.779689Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:710:2594], subTag: 1} finished in 0.001231s, sampled# 100, iter finished# 1, oks# 100 2024-11-21T23:11:41.779883Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} received keyCount# 100 2024-11-21T23:11:41.780153Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:701:2585], subTag: 3} started# 10 actors each with inflight# 1 2024-11-21T23:11:41.780221Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} Bootstrap called 2024-11-21T23:11:41.780277Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780345Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} Bootstrap called 2024-11-21T23:11:41.780378Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780408Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} Bootstrap called 2024-11-21T23:11:41.780430Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780453Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} Bootstrap called 2024-11-21T23:11:41.780487Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780523Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} Bootstrap called 2024-11-21T23:11:41.780553Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780583Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} Bootstrap called 2024-11-21T23:11:41.780604Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780628Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} Bootstrap called 2024-11-21T23:11:41.780652Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780692Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} Bootstrap called 2024-11-21T23:11:41.780728Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780761Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} Bootstrap called 2024-11-21T23:11:41.780782Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.780807Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} Bootstrap called 2024-11-21T23:11:41.780830Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2024-11-21T23:11:41.784005Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 2} session: ydb://session/3?node_id=1&id=MWNmMmQ2YTktZDI5NTBlMTUtNmE2OTY1MjQtZjA2MWEzYg== 2024-11-21T23:11:41.784297Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 3} session: ydb://session/3?node_id=1&id=OTBlZTQ0ZmUtMWI3ZGEyYTQtZTQwYzNjM2EtYWE4ZWY2ZDY= 2024-11-21T23:11:41.785936Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 4} session: ydb://session/3?node_id=1&id=NmFlYjU1NmQtNzNkZDEzMDQtNTYyYTM1OWQtNTUyYmU2M2Q= 2024-11-21T23:11:41.787346Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 5} session: ydb://session/3?node_id=1&id=NTMzZTYwYTktYmI3MTk2YzYtZTU2ZDUwYWQtMjRlZGExMmI= 2024-11-21T23:11:41.788710Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 6} session: ydb://session/3?node_id=1&id=NjdhNmE1ZGMtZDAyZDQ2ODktZjY4MThlMjMtNTYwMjEzY2U= 2024-11-21T23:11:41.790040Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 7} session: ydb://session/3?node_id=1&id=ZjBkNDQzNGQtMThmNGViNGMtMzRiOWJhNmUtYmNlNmY2MWM= 2024-11-21T23:11:41.792579Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 8} session: ydb://session/3?node_id=1&id=ZTgwNjM3LTk2OGYwNGRlLTVmZDE5MjRhLWU5MWIxMmFh 2024-11-21T23:11:41.792750Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 9} session: ydb://session/3?node_id=1&id=ODcxMDVlZGQtODVjMGUxOTctYmVjYWFkMTctOTEwMjZmZTk= 2024-11-21T23:11:41.795197Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 10} session: ydb://session/3?node_id=1&id=Yjg1ZjhjMzItZDYxNWJiNmMtOTNhYzY2NGEtN2JhZjI1ZGM= 2024-11-21T23:11:41.795304Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:710:2594], subTag: 11} session: ydb://session/3?node_id=1&id=ZDZhMTYyZDUtMjdiYjRkNGYtODc5NjkzOTUtMzY0ZDVjYjk= 2024-11-21T23:11:41.800965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:738:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.801103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:774:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.801173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:775:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.801307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:776:2654], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.801354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:777:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.801427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:778:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.801472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.801515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:41.801696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: R ... yMDEtMjlmNzIzNWItZTAxMzk1ODQtNWI2OWFjY2M= 2024-11-21T23:11:51.547858Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 3} session: ydb://session/3?node_id=2&id=NTY0MmEzMTEtN2E0M2ZhYjctYzZiZDk0YmYtMWE2MWEyNjU= 2024-11-21T23:11:51.550691Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 4} session: ydb://session/3?node_id=2&id=NmRjZTc1YWEtZThmYjk0MTEtZDFkYmNmYTktNzBhNmFiMA== 2024-11-21T23:11:51.550847Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 5} session: ydb://session/3?node_id=2&id=ODIyYjc0ZTYtODRkYWQ3MmItMzZiMWJlZDEtOTMxMzg5YTg= 2024-11-21T23:11:51.553581Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 6} session: ydb://session/3?node_id=2&id=OTI5OWE1MDEtMWNiZDlmMzItN2E5NDQwNmMtYTExMWZmOWY= 2024-11-21T23:11:51.553691Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 7} session: ydb://session/3?node_id=2&id=M2Q5YjIwZGMtNmRmMDg2NDktYzI3MWMzZWUtZGM3MWIyNw== 2024-11-21T23:11:51.555267Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 8} session: ydb://session/3?node_id=2&id=YjJlMGVlYmItMWQ4M2M3YzktMWQ3NTY1ZWYtNWZmNjExYzY= 2024-11-21T23:11:51.556720Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 9} session: ydb://session/3?node_id=2&id=Y2U2ZDg4YTktOTFhODZkOGYtYzUzNjIwOGYtZDY4ZmI0ZQ== 2024-11-21T23:11:51.559267Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 10} session: ydb://session/3?node_id=2&id=YzBiYjQwZGQtMmU1YTE0ZWEtYThmZTk2ZGItOTRlYTFmYzI= 2024-11-21T23:11:51.559367Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 11} session: ydb://session/3?node_id=2&id=ZjQxNTJjNTYtNmQ2MzIwZS0zNGRmYTg1ZS1kZmJkYzNiNw== 2024-11-21T23:11:51.564164Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:736:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.564274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:763:2641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.564339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:764:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.564392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:765:2643], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.564441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:766:2644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.564525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:770:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.564574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:772:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.565104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:774:2652], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.566318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:790:2668], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.567395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:795:2673], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.567626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.568494Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:804:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:51.574995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:11:51.786717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:784:2662], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.786834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:785:2663], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.786890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:786:2664], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.786965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:787:2665], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.787024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:788:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.787076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:789:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.787172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:794:2672], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.787229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:803:2681], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.787279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:806:2684], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:51.787332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:822:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:11:52.226098Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 3} finished in 0.678168s, errors=0 2024-11-21T23:11:52.226306Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 3 { Tag: 3 DurationMs: 678 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:52.600081Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 7} finished in 1.046330s, errors=0 2024-11-21T23:11:52.600352Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 7 { Tag: 7 DurationMs: 1046 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:53.000917Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 10} finished in 1.441581s, errors=0 2024-11-21T23:11:53.001229Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 10 { Tag: 10 DurationMs: 1441 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:53.533590Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 11} finished in 1.974159s, errors=0 2024-11-21T23:11:53.533799Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 11 { Tag: 11 DurationMs: 1974 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:54.128056Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 5} finished in 2.577132s, errors=0 2024-11-21T23:11:54.128471Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 5 { Tag: 5 DurationMs: 2577 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:54.771650Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 6} finished in 3.217992s, errors=0 2024-11-21T23:11:54.772044Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 6 { Tag: 6 DurationMs: 3217 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:55.346414Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 4} finished in 3.795622s, errors=0 2024-11-21T23:11:55.346841Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 4 { Tag: 4 DurationMs: 3795 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:55.927251Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 9} finished in 4.370466s, errors=0 2024-11-21T23:11:55.927520Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 9 { Tag: 9 DurationMs: 4370 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:56.617369Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 2} finished in 5.069631s, errors=0 2024-11-21T23:11:56.617694Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 2 { Tag: 2 DurationMs: 5069 OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:57.403872Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:709:2593], subTag: 8} finished in 5.848529s, errors=0 2024-11-21T23:11:57.404156Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished: 8 { Tag: 8 DurationMs: 5848 
OperationsOK: 100 OperationsError: 0 } 2024-11-21T23:11:57.404230Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:700:2584], subTag: 3} finished in 5.863855s, oks# 1000, errors# 0 2024-11-21T23:11:57.404518Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:709:2593] with tag# 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 8679, MsgBus: 16781 2024-11-21T23:11:25.770167Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874144144536593:2087];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:25.772633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012ff/r3tmp/tmpfQEZ1K/pdisk_1.dat 2024-11-21T23:11:26.153717Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:26.162218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:26.162342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:26.184207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8679, node 1 2024-11-21T23:11:26.238960Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:26.238981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:26.238991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:26.239079Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16781 TClient is connected to server localhost:16781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:26.893718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:26.907541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:11:26.921297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:27.140227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:27.332900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:27.403309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:29.245459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874161324407429:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.245547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.526528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.557750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.595947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.632516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.707423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.769303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:29.856444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874161324407930:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.856536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.856760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874161324407935:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:29.860149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:29.883917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874161324407937:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:30.772785Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874144144536593:2087];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:30.772849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:34.346767Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230692045, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11877, MsgBus: 64688 2024-11-21T23:11:35.398604Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874186588594754:2051];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012ff/r3tmp/tmp1ZQSld/pdisk_1.dat 2024-11-21T23:11:35.498114Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:11:35.580695Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:35.614146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:35.614252Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:35.616204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11877, node 2 2024-11-21T23:11:35.790948Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:35.790979Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:35.790987Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:35.791090Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64688 TClient is connected to server localhost:64688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:36.671889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:11:36.705032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:11:36.808364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:37.026384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:37.121875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:39.555615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: ... able, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:45.193620Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:47.710584Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874239252714237:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:47.710716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:47.785839Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:47.821574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:47.858129Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:47.897825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:47.937058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:48.013470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:48.093704Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874243547682039:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:48.093894Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:48.094286Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439874243547682044:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:48.099340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:48.110613Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439874243547682046:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:11:49.170059Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439874226367810640:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:49.170146Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:50.080722Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230709650, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 6303, MsgBus: 13103 2024-11-21T23:11:50.887226Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439874250666437854:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:50.887298Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012ff/r3tmp/tmpUFvqJu/pdisk_1.dat 2024-11-21T23:11:51.016791Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:51.043827Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:51.043931Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:51.046790Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6303, node 4 2024-11-21T23:11:51.097182Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:51.097206Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:51.097218Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:51.097348Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13103 TClient is connected to server localhost:13103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:11:51.680779Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:11:51.705967Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:51.793525Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:52.020100Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:52.120325Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:11:54.546529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874267846308745:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:54.546635Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:54.609110Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:11:54.643447Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:11:54.678783Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:11:54.716186Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:11:54.752435Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:11:54.796491Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:11:54.881957Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874267846309244:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:54.882044Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:54.882109Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439874267846309249:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:54.886165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:11:54.898273Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439874267846309251:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:11:55.888140Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439874250666437854:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:11:55.888203Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:11:56.956789Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230716685, txId: 281474976710671] shutting down >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> TPQCachingProxyTest::MultipleSessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2024-11-21T23:11:57.978853Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:57.979344Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001c15/r3tmp/tmp8xu2l1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:57.979958Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001c15/r3tmp/tmp8xu2l1/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001c15/r3tmp/tmp8xu2l1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4410619121752580631 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:58.018704Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:58.019005Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:58.033335Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:58.033485Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:58.033663Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at 
[1:419:2311] 2024-11-21T23:11:58.033740Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:58.033904Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:58.033948Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:58.033986Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:58.034010Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:58.034218Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.043805Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.044067Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.044166Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.044574Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:58.044739Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:58.044875Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:58.044912Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.045018Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.045191Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:58.045237Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.045316Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.045982Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:58.046105Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.051625Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.051947Z node 2 
:KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:58.052213Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.052378Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:58.052513Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:58.052623Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:58.055383Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:58.055465Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:58.055545Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:58.055592Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:58.055650Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:448:2330])) 2024-11-21T23:11:58.055897Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:58.056112Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:448:2330]) (release resources {0, 100}) 2024-11-21T23:11:58.056152Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:448:2330])) 2024-11-21T23:11:58.056189Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. >> StatisticsSaveLoad::ForbidAccess [GOOD] >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> TOlap::CreateDropTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2024-11-21T23:11:58.722002Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:11:58.722073Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:11:58.737018Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:11:58.737114Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2024-11-21T23:11:58.737193Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T23:11:58.737238Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T23:11:58.737300Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2024-11-21T23:11:58.737345Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. 
Data ignored 2024-11-21T23:11:58.737385Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T23:11:58.737467Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TPQCachingProxyTest::TestDeregister [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2024-11-21T23:11:58.192157Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:58.192653Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001be0/r3tmp/tmpWhKXVv/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:58.193727Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001be0/r3tmp/tmpWhKXVv/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001be0/r3tmp/tmpWhKXVv/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7510875343922477325 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:58.235741Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:58.236053Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:58.256003Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:58.256131Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:58.256268Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:58.256323Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:58.256450Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to 
config dispatcher 2024-11-21T23:11:58.256476Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:58.256500Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:58.256518Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:58.256693Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.267424Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.267627Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.267689Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.268000Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:58.268118Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:58.268211Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:58.268236Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.268322Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.268435Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:58.268462Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.268513Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.268984Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:58.269064Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.269375Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.269626Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:58.269903Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.270035Z node 2 
:KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:58.270174Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:58.270296Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:58.273397Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:58.273492Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:58.273567Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:58.273613Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:58.273675Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:448:2330])) 2024-11-21T23:11:58.273879Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:58.274107Z node 1 :RESOURCE_BROKER DEBUG: Update task kqp-1-1-1 (1 by [1:448:2330]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2024-11-21T23:11:58.274152Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:58.274195Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:448:2330])) 2024-11-21T23:11:58.274234Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2024-11-21T23:11:58.438763Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:58.439319Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001bb7/r3tmp/tmpwoX3LN/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:58.443263Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001bb7/r3tmp/tmpwoX3LN/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001bb7/r3tmp/tmpwoX3LN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 764124984786809967 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:58.480671Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:58.480973Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:58.495770Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:58.495922Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:58.496152Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:58.496250Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:58.496398Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:58.496444Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:58.496485Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:58.496507Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T23:11:58.496676Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.507814Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.508032Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.508137Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2024-11-21T23:11:58.508494Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:58.508632Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:58.508779Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:58.508813Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.508946Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.509115Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:58.509152Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.509212Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2024-11-21T23:11:58.509778Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:58.509902Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.510345Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.510706Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:58.511083Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.511273Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:58.511469Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:58.511620Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:58.515027Z node 1 :RESOURCE_BROKER 
DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:448:2330]) priority=0 resources={0, 1000} 2024-11-21T23:11:58.515097Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:58.515151Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:58.515201Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:58.515246Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:448:2330])) 2024-11-21T23:11:58.515408Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2024-11-21T23:11:58.515501Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:448:2330]) priority=0 resources={0, 100000} 2024-11-21T23:11:58.515539Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:58.515578Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task kqp-1-2-2 (2 by [1:448:2330]) 2024-11-21T23:11:58.515610Z node 1 :RESOURCE_BROKER DEBUG: Removing task kqp-1-2-2 (2 by [1:448:2330]) 2024-11-21T23:11:58.515665Z node 1 :KQP_RESOURCE_MANAGER NOTICE: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx largest failed memory allocation: 0B, tx total execution units: 0, started at: 2024-11-21T23:11:58.514949Z } >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TPQCachingProxyTest::MultipleSessions [GOOD] |82.9%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |82.9%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpRm::NodesMembershipByExchanger [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> TOlap::AlterStore >> TOlap::CreateTable >> TxUsage::WriteToTopic_Demo_29 [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy >> TOlap::Decimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2024-11-21T23:11:59.168144Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:11:59.168241Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:11:59.199099Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:11:59.199219Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T23:11:59.199334Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T23:11:59.199375Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T23:11:59.199473Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2024-11-21T23:11:59.244005Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:11:59.244114Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:11:59.262856Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:11:59.262952Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T23:11:59.263011Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2024-11-21T23:11:59.263130Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2024-11-21T23:11:59.336407Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:11:59.336482Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:11:59.356986Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:11:59.357090Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T23:11:59.357163Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T23:11:59.357205Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T23:11:59.357267Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2024-11-21T23:11:59.493340Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not 
enabled in BillingMeteringConfig 2024-11-21T23:11:59.493404Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:11:59.520128Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:11:59.520232Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2024-11-21T23:11:59.520308Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2024-11-21T23:11:59.520363Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2024-11-21T23:11:59.520404Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1 2024-11-21T23:11:59.520452Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1 2024-11-21T23:11:59.520504Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2024-11-21T23:11:59.520547Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2024-11-21T23:11:59.520585Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2024-11-21T23:11:57.869847Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:57.870215Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001ce1/r3tmp/tmp4MI8ar/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:57.870751Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001ce1/r3tmp/tmp4MI8ar/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001ce1/r3tmp/tmp4MI8ar/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13426430083611754004 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:57.908908Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:57.909167Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:57.923221Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:57.923353Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:57.923517Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:57.923604Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:57.923752Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:57.923803Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:57.923838Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:57.923867Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T23:11:57.924026Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:57.939703Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230717 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:57.939935Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:57.940035Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230717 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:57.940386Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:57.940521Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:57.940633Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:57.940661Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:57.940750Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230717 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:57.940942Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:57.940976Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:57.941035Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230717 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:57.941606Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:57.941730Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:57.942108Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:57.942468Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:57.942797Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:57.942972Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:57.943131Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:57.943326Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:58.873930Z node 1 :KQP_RESOURCE_MANAGER DEBUG: 
Schedule Snapshot request 2024-11-21T23:11:58.874049Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2024-11-21T23:11:58.874975Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:59.148814Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> TOlap::CreateTableTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: 2024-11-21T23:11:50.043940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:50.044146Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:50.044201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e1b/r3tmp/tmpKhd1Kv/pdisk_1.dat 2024-11-21T23:11:50.334577Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18229, node 1 2024-11-21T23:11:50.533293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:50.533340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:50.533363Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:50.533655Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:11:50.560988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:50.654874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:50.654961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:50.668274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17061 2024-11-21T23:11:51.323644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:54.574326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:54.574440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:54.617681Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:11:54.621786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:54.853275Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:54.906721Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:11:54.906841Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:11:54.938909Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:11:54.939868Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:11:54.940079Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:11:54.940140Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:11:54.940205Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:11:54.940268Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:11:54.940317Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:11:54.940363Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:11:54.940751Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:11:55.196129Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:11:55.196243Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:11:55.201849Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T23:11:55.211090Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T23:11:55.211495Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T23:11:55.219315Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T23:11:55.240096Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:11:55.240162Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:11:55.240247Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T23:11:55.249286Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:55.249367Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:55.254887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:11:55.266881Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:11:55.267035Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:11:55.281976Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:11:55.301210Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:55.332540Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:11:55.683946Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:11:55.854629Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:11:56.681364Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:11:56.681944Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T23:11:56.703936Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2024-11-21T23:11:56.708571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2160:3035], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:56.708698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2176:3040], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:56.708779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:56.716164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037889 2024-11-21T23:11:56.781173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2180:3043], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:11:57.192899Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2286:3083]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:11:57.193055Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:11:57.193134Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2288:3085] 2024-11-21T23:11:57.193196Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2288:3085] 2024-11-21T23:11:57.193740Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2289:2797] 2024-11-21T23:11:57.193964Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2288:3085], server id = [2:2289:2797], tablet id = 72075186224037897, status = OK 2024-11-21T23:11:57.194164Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2289:2797], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T23:11:57.194232Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T23:11:57.194454Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:11:57.194548Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2286:3083], StatRequests.size() = 1 2024-11-21T23:11:57.366409Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=M2EzM2ExOWYtZjlkNjNhMzEtMWE4NTcyN2MtYjBlNGI5YzY=, TxId: 2024-11-21T23:11:57.366496Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=M2EzM2ExOWYtZjlkNjNhMzEtMWE4NTcyN2MtYjBlNGI5YzY=, TxId: 2024-11-21T23:11:57.367594Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2024-11-21T23:11:57.370126Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T23:11:57.384211Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2317:3106]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:11:57.384412Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:11:57.384459Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2317:3106], StatRequests.size() = 1 2024-11-21T23:11:57.501676Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=Zjc2NTQxMmYtNGIzNTI0ZDUtYTU1ZDZmOTMtZTI0ZjNhYjA=, TxId: 2024-11-21T23:11:57.501744Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=Zjc2NTQxMmYtNGIzNTI0ZDUtYTU1ZDZmOTMtZTI0ZjNhYjA=, TxId: 2024-11-21T23:11:57.502931Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2024-11-21T23:11:57.505419Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2024-11-21T23:11:57.523508Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2349:3122]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:11:57.523680Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T23:11:57.523720Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2349:3122], StatRequests.size() = 1 2024-11-21T23:11:57.633213Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=M2EwMjdlOWQtYWM2MTQ3ODMtZDg2MDYyNDUtODc0MWQ0NmM=, TxId: 01jd8fy85e4g0zvfm1dwkfr6kq 2024-11-21T23:11:57.633344Z node 1 :STATISTICS WARN: [TQueryBase] Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=M2EwMjdlOWQtYWM2MTQ3ODMtZDg2MDYyNDUtODc0MWQ0NmM=, TxId: 01jd8fy85e4g0zvfm1dwkfr6kq >> TOlap::CreateStore >> TOlap::CreateStoreWithDirs |82.9%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: 2024-11-21T23:11:48.777153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:11:48.777373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:11:48.777443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e7e/r3tmp/tmpP4PmNQ/pdisk_1.dat 2024-11-21T23:11:49.175148Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19701, node 1 2024-11-21T23:11:49.427215Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:11:49.427284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:11:49.427316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:11:49.427778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:11:49.475370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:11:49.575095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:49.575237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:49.590138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18148 2024-11-21T23:11:50.231930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:11:53.621688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:53.621830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:53.661806Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:11:53.669369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:53.865607Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:11:53.928815Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:11:53.928897Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:11:53.973294Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:11:53.974308Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:11:53.974572Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:11:53.974638Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:11:53.974693Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:11:53.974756Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:11:53.974816Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:11:53.974869Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:11:53.975260Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:11:54.197582Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:11:54.197707Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:11:54.203446Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T23:11:54.212848Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T23:11:54.213228Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T23:11:54.215736Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T23:11:54.237171Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:11:54.237245Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:11:54.237318Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T23:11:54.246169Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:11:54.246280Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:11:54.252206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:11:54.259227Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:11:54.259429Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:11:54.272782Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:11:54.292700Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:11:54.319669Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:11:54.659301Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:11:54.845856Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:11:55.875002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:55.875132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:55.891328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T23:11:56.289631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2432:3069], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:56.289751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:56.291090Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2437:3073]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:11:56.291309Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:11:56.291387Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2439:3075] 2024-11-21T23:11:56.291455Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2439:3075] 2024-11-21T23:11:56.292026Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2440:2944] 2024-11-21T23:11:56.292370Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2439:3075], server id = [2:2440:2944], tablet id = 72075186224037897, status = OK 2024-11-21T23:11:56.292533Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2440:2944], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T23:11:56.292593Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T23:11:56.292754Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:11:56.292814Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2437:3073], StatRequests.size() = 1 2024-11-21T23:11:56.307813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2444:3079], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:56.307906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:56.308160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2449:3084], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:11:56.313079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T23:11:56.489318Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T23:11:56.489404Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T23:11:56.563874Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2439:3075], schemeshard count = 1 2024-11-21T23:11:56.857504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2451:3086], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T23:11:56.979555Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2590:3173]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:11:56.979703Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:11:56.979737Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2590:3173], StatRequests.size() = 1 2024-11-21T23:11:57.052119Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fy6vpbjvttmhjrwsk0snp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWFmYmNhZS02Nzc5M2ZiOC04MzZiOGViNC01MzFmMzc4MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:11:57.204998Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2667:3204], for# user@builtin, access# DescribeSchema 2024-11-21T23:11:57.205097Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2667:3204], for# user@builtin, access# DescribeSchema 2024-11-21T23:11:57.218098Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:2657:3200], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:11:57.220040Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTE3YmNlOTAtYmQwMzVhNDMtNmY3Nzg0MGEtY2QxZjM3ODE=, ActorId: [1:2648:3192], ActorState: ExecuteState, TraceId: 01jd8fy7r4etvc1b4y3cpd4a63, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TOlap::CreateDropStandaloneTable >> TxUsage::WriteToTopic_Demo_30 >> TOlap::CustomDefaultPresets >> TOlap::CreateDropTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] Test command err: 2024-11-21T23:11:52.693231Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 [1:6:2053] 2024-11-21T23:11:52.693525Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:7:2054] worker 0 2024-11-21T23:11:52.693564Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:8:2055] worker 1 2024-11-21T23:11:52.693597Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:9:2056] worker 2 2024-11-21T23:11:52.693634Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:10:2057] worker 3 2024-11-21T23:11:52.693670Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:11:2058] worker 4 2024-11-21T23:11:52.693715Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:12:2059] worker 5 2024-11-21T23:11:52.693756Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:13:2060] worker 6 2024-11-21T23:11:52.693786Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:14:2061] worker 7 2024-11-21T23:11:52.693819Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:15:2062] worker 8 2024-11-21T23:11:52.693848Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:6:2053] self [1:16:2063] worker 9 Sending message to [1:8:2055] from [1:6:2053] id 1 Sending message to [1:9:2056] from [1:6:2053] id 2 Sending message to [1:10:2057] from [1:6:2053] id 3 Sending message to [1:11:2058] from [1:6:2053] id 4 Sending message to [1:12:2059] from [1:6:2053] id 5 Sending message to [1:13:2060] from [1:6:2053] id 6 Sending message to [1:14:2061] from [1:6:2053] id 7 Sending message to [1:15:2062] from [1:6:2053] id 8 Sending message to [1:16:2063] from [1:6:2053] id 9 Sending message to [1:7:2054] from [1:6:2053] id 10 2024-11-21T23:11:53.543176Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [1:16:2063] 2024-11-21T23:11:53.543407Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [1:7:2054] 2024-11-21T23:11:53.543461Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [1:8:2055] 2024-11-21T23:11:53.543514Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [1:9:2056] 2024-11-21T23:11:53.543560Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [1:10:2057] 2024-11-21T23:11:53.543633Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [1:11:2058] 2024-11-21T23:11:53.543708Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [1:12:2059] 2024-11-21T23:11:53.543766Z node 1 :TABLET_AGGREGATOR INFO: 
aggregator actor request to node 6 [1:13:2060] 2024-11-21T23:11:53.543834Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [1:14:2061] 2024-11-21T23:11:53.543885Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [1:15:2062] 2024-11-21T23:11:53.543934Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:14:2061] 2024-11-21T23:11:53.545440Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:14:2061] 2024-11-21T23:11:53.574988Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:14:2061] Initiator [1:6:2053] 2024-11-21T23:11:53.596509Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:15:2062] 2024-11-21T23:11:53.598065Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:15:2062] 2024-11-21T23:11:53.628233Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:15:2062] Initiator [1:6:2053] 2024-11-21T23:11:53.649420Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:16:2063] 2024-11-21T23:11:53.650973Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:16:2063] 2024-11-21T23:11:53.680513Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:16:2063] Initiator [1:6:2053] 2024-11-21T23:11:53.702049Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:6:2053] 2024-11-21T23:11:53.702224Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:6:2053] 2024-11-21T23:11:53.707376Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [1:7:2054] 2024-11-21T23:11:53.708984Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [1:7:2054] 2024-11-21T23:11:53.738305Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:7:2054] Initiator [1:6:2053] 2024-11-21T23:11:53.760143Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:8:2055] 2024-11-21T23:11:53.761821Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:8:2055] 2024-11-21T23:11:53.793041Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:8:2055] Initiator [1:6:2053] 2024-11-21T23:11:53.816173Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:9:2056] 2024-11-21T23:11:53.817830Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:9:2056] 2024-11-21T23:11:53.846601Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:9:2056] Initiator [1:6:2053] 2024-11-21T23:11:53.867183Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:10:2057] 2024-11-21T23:11:53.868788Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:10:2057] 2024-11-21T23:11:53.894826Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:10:2057] Initiator [1:6:2053] 2024-11-21T23:11:53.917238Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:11:2058] 2024-11-21T23:11:53.918963Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:11:2058] 2024-11-21T23:11:53.949258Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:11:2058] Initiator [1:6:2053] 2024-11-21T23:11:53.970438Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:12:2059] 2024-11-21T23:11:53.971956Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:12:2059] 2024-11-21T23:11:53.999931Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:12:2059] Initiator 
[1:6:2053] 2024-11-21T23:11:54.022387Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:13:2060] 2024-11-21T23:11:54.024108Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:13:2060] 2024-11-21T23:11:54.053588Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:13:2060] Initiator [1:6:2053] 2024-11-21T23:11:54.075727Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:6:2053] 2024-11-21T23:11:54.075914Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:6:2053] 2024-11-21T23:11:54.081445Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:6:2053] 2024-11-21T23:11:54.081576Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:6:2053] 2024-11-21T23:11:54.086583Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [1:6:2053] 2024-11-21T23:11:54.086712Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [1:6:2053] 2024-11-21T23:11:54.092429Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:6:2053] 2024-11-21T23:11:54.092565Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:6:2053] 2024-11-21T23:11:54.098191Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:6:2053] 2024-11-21T23:11:54.098326Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:6:2053] 2024-11-21T23:11:54.103948Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:6:2053] 2024-11-21T23:11:54.104088Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:6:2053] 2024-11-21T23:11:54.111362Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:6:2053] 2024-11-21T23:11:54.111526Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:6:2053] 2024-11-21T23:11:54.116636Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:6:2053] 2024-11-21T23:11:54.116777Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:6:2053] 2024-11-21T23:11:54.122150Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:6:2053] 2024-11-21T23:11:54.122290Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:6:2053] 2024-11-21T23:11:54.127544Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:6:2053] Initiator [1:5:2052] TEST 2 10 duration 1.566947s 2024-11-21T23:11:54.385635Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 [2:6:2053] 2024-11-21T23:11:54.386130Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:7:2054] worker 0 2024-11-21T23:11:54.386177Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:8:2055] worker 1 2024-11-21T23:11:54.386206Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:9:2056] worker 2 2024-11-21T23:11:54.386236Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:10:2057] worker 3 2024-11-21T23:11:54.386280Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:11:2058] worker 4 2024-11-21T23:11:54.386328Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:12:2059] worker 5 2024-11-21T23:11:54.386356Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:13:2060] worker 6 2024-11-21T23:11:54.386385Z node 2 :TABLET_AGGREGATOR 
INFO: aggregator new request V2 Initiator [2:6:2053] self [2:14:2061] worker 7 2024-11-21T23:11:54.386438Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:15:2062] worker 8 2024-11-21T23:11:54.386464Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:16:2063] worker 9 2024-11-21T23:11:54.386494Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:17:2064] worker 10 2024-11-21T23:11:54.386521Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:18:2065] worker 11 2024-11-21T23:11:54.386562Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:19:2066] worker 12 2024-11-21T23:11:54.386624Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:20:2067] worker 13 2024-11-21T23:11:54.386659Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:21:2068] worker 14 2024-11-21T23:11:54.386685Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:22:2069] worker 15 2024-11-21T23:11:54.386710Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:23:2070] worker 16 2024-11-21T23:11:54.386735Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:24:2071] worker 17 2024-11-21T23:11:54.386759Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:25:2072] worker 18 2024-11-21T23:11:54.386797Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:6:2053] self [2:26:2073] worker 19 Sending message to [2:8:2055] from [2:6:2053] id 1 Sending message to [2:9:2056] from [2:6:2053] id 2 Sending message to [2:10:2057] from [2:6:2053] id 3 Sending message to [2:11:2058] from [2:6:2053] id 4 Sending message to [2:12:2059] from [2:6:2053] id 5 Sending message to [2:13:2060] from [2:6:2053] id 6 Sending message to [2:14:2061] from [2:6:2053] id 7 Sending message to [2:15:2062] from [2:6:2053] id 8 Sending message to [2:16:2063] from [2:6:2053] id ... 
response node 15 [2:6:2053] 2024-11-21T23:11:55.610818Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 16 [2:6:2053] 2024-11-21T23:11:55.610849Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 16 [2:6:2053] 2024-11-21T23:11:55.610912Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 17 [2:6:2053] 2024-11-21T23:11:55.610932Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 17 [2:6:2053] 2024-11-21T23:11:55.610982Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 18 [2:6:2053] 2024-11-21T23:11:55.611100Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 18 [2:6:2053] 2024-11-21T23:11:55.611148Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 19 [2:6:2053] 2024-11-21T23:11:55.611171Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 19 [2:6:2053] 2024-11-21T23:11:55.611239Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [2:6:2053] 2024-11-21T23:11:55.611281Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [2:6:2053] 2024-11-21T23:11:55.611313Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [2:6:2053] 2024-11-21T23:11:55.611412Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [2:6:2053] 2024-11-21T23:11:55.616446Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [2:6:2053] 2024-11-21T23:11:55.616582Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [2:6:2053] 2024-11-21T23:11:55.621803Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [2:6:2053] 2024-11-21T23:11:55.621969Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [2:6:2053] 2024-11-21T23:11:55.627407Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [2:6:2053] 2024-11-21T23:11:55.627545Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [2:6:2053] 2024-11-21T23:11:55.632484Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [2:6:2053] 2024-11-21T23:11:55.632580Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [2:6:2053] 2024-11-21T23:11:55.637003Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [2:6:2053] 2024-11-21T23:11:55.637101Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [2:6:2053] 2024-11-21T23:11:55.641756Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [2:6:2053] 2024-11-21T23:11:55.641894Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [2:6:2053] 2024-11-21T23:11:55.648011Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [2:6:2053] 2024-11-21T23:11:55.648146Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [2:6:2053] 2024-11-21T23:11:55.653056Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [2:6:2053] 2024-11-21T23:11:55.653196Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [2:6:2053] 2024-11-21T23:11:55.658535Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [2:6:2053] 2024-11-21T23:11:55.658689Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [2:6:2053] 2024-11-21T23:11:55.664384Z node 2 :TABLET_AGGREGATOR INFO: aggregator request processed [2:6:2053] Initiator [2:5:2052] TEST 2 20 duration 1.418652s 2024-11-21T23:11:55.874676Z node 3 :TABLET_AGGREGATOR INFO: aggregator new 
request V2 [3:6:2053] 2024-11-21T23:11:55.874808Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [3:6:2053] self [3:7:2054] worker 0 Sending message to [3:7:2054] from [3:6:2053] id 1 Sending message to [3:7:2054] from [3:6:2053] id 2 Sending message to [3:7:2054] from [3:6:2053] id 3 Sending message to [3:7:2054] from [3:6:2053] id 4 Sending message to [3:7:2054] from [3:6:2053] id 5 Sending message to [3:7:2054] from [3:6:2053] id 6 Sending message to [3:7:2054] from [3:6:2053] id 7 Sending message to [3:7:2054] from [3:6:2053] id 8 Sending message to [3:7:2054] from [3:6:2053] id 9 Sending message to [3:7:2054] from [3:6:2053] id 10 2024-11-21T23:11:56.582925Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [3:7:2054] 2024-11-21T23:11:56.583002Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [3:7:2054] 2024-11-21T23:11:56.583049Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [3:7:2054] 2024-11-21T23:11:56.583156Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [3:7:2054] 2024-11-21T23:11:56.583239Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [3:7:2054] 2024-11-21T23:11:56.583279Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [3:7:2054] 2024-11-21T23:11:56.583317Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [3:7:2054] 2024-11-21T23:11:56.583352Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [3:7:2054] 2024-11-21T23:11:56.583412Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [3:7:2054] 2024-11-21T23:11:56.583479Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [3:7:2054] 2024-11-21T23:11:56.584056Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [3:7:2054] 2024-11-21T23:11:56.585748Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [3:7:2054] 2024-11-21T23:11:56.619687Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [3:7:2054] 2024-11-21T23:11:56.621525Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [3:7:2054] 2024-11-21T23:11:56.656648Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [3:7:2054] 2024-11-21T23:11:56.658507Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [3:7:2054] 2024-11-21T23:11:56.695313Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [3:7:2054] 2024-11-21T23:11:56.697179Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [3:7:2054] 2024-11-21T23:11:56.730610Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [3:7:2054] 2024-11-21T23:11:56.732426Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [3:7:2054] 2024-11-21T23:11:56.774413Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [3:7:2054] 2024-11-21T23:11:56.776198Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [3:7:2054] 2024-11-21T23:11:56.810104Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [3:7:2054] 2024-11-21T23:11:56.811999Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [3:7:2054] 2024-11-21T23:11:56.846725Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [3:7:2054] 2024-11-21T23:11:56.848609Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [3:7:2054] 2024-11-21T23:11:56.880833Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor 
got response node 9 [3:7:2054] 2024-11-21T23:11:56.882281Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [3:7:2054] 2024-11-21T23:11:56.914013Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [3:7:2054] 2024-11-21T23:11:56.915728Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [3:7:2054] 2024-11-21T23:11:56.961959Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:7:2054] Initiator [3:6:2053] 2024-11-21T23:11:57.272330Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [3:6:2053] 2024-11-21T23:11:57.273085Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [3:6:2053] 2024-11-21T23:11:57.330215Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:6:2053] Initiator [3:5:2052] TEST 2 1 duration 1.659638s 2024-11-21T23:11:57.773130Z node 4 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [4:5:2052] self [4:6:2053] worker 0 Sending message to [4:6:2053] from [4:6:2053] id 1 Sending message to [4:6:2053] from [4:6:2053] id 2 Sending message to [4:6:2053] from [4:6:2053] id 3 Sending message to [4:6:2053] from [4:6:2053] id 4 Sending message to [4:6:2053] from [4:6:2053] id 5 Sending message to [4:6:2053] from [4:6:2053] id 6 Sending message to [4:6:2053] from [4:6:2053] id 7 Sending message to [4:6:2053] from [4:6:2053] id 8 Sending message to [4:6:2053] from [4:6:2053] id 9 Sending message to [4:6:2053] from [4:6:2053] id 10 2024-11-21T23:11:58.518000Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [4:6:2053] 2024-11-21T23:11:58.518079Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [4:6:2053] 2024-11-21T23:11:58.518105Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [4:6:2053] 2024-11-21T23:11:58.518132Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [4:6:2053] 2024-11-21T23:11:58.518232Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [4:6:2053] 2024-11-21T23:11:58.518278Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [4:6:2053] 2024-11-21T23:11:58.518314Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [4:6:2053] 2024-11-21T23:11:58.518363Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [4:6:2053] 2024-11-21T23:11:58.518426Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [4:6:2053] 2024-11-21T23:11:58.518479Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [4:6:2053] 2024-11-21T23:11:58.518818Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [4:6:2053] 2024-11-21T23:11:58.520627Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [4:6:2053] 2024-11-21T23:11:58.552332Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [4:6:2053] 2024-11-21T23:11:58.554073Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [4:6:2053] 2024-11-21T23:11:58.586192Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [4:6:2053] 2024-11-21T23:11:58.587853Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [4:6:2053] 2024-11-21T23:11:58.617818Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [4:6:2053] 2024-11-21T23:11:58.619158Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [4:6:2053] 2024-11-21T23:11:58.646285Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [4:6:2053] 
2024-11-21T23:11:58.647977Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [4:6:2053] 2024-11-21T23:11:58.682342Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [4:6:2053] 2024-11-21T23:11:58.683922Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [4:6:2053] 2024-11-21T23:11:58.708144Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [4:6:2053] 2024-11-21T23:11:58.710039Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [4:6:2053] 2024-11-21T23:11:58.735273Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [4:6:2053] 2024-11-21T23:11:58.736554Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [4:6:2053] 2024-11-21T23:11:58.761871Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [4:6:2053] 2024-11-21T23:11:58.763251Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [4:6:2053] 2024-11-21T23:11:58.794674Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [4:6:2053] 2024-11-21T23:11:58.796487Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [4:6:2053] 2024-11-21T23:11:58.858346Z node 4 :TABLET_AGGREGATOR INFO: aggregator request processed [4:6:2053] Initiator [4:5:2052] TEST 2 1 duration 1.484034s >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] |82.9%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TOlap::Decimal [GOOD] >> TOlap::CreateTable [GOOD] >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:11:59.759428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:11:59.759568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:11:59.759616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:11:59.759668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:11:59.759717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:11:59.759749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:11:59.759830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:11:59.760305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:11:59.826847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:11:59.826903Z node 1 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T23:11:59.837181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:11:59.839872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:11:59.840048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:11:59.846172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:11:59.846883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:11:59.847572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:11:59.847915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:11:59.852574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:11:59.854034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:11:59.854098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:11:59.854328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:11:59.854386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:11:59.854451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:11:59.854552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:11:59.862777Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:11:59.977954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:11:59.978226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:59.978468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:11:59.978735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:11:59.978799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:59.981439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:11:59.981576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:11:59.981792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:59.981876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:11:59.981934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:11:59.981978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:11:59.984263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:59.984324Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:11:59.984363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:11:59.986363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:59.986438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:59.986487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:11:59.986537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:11:59.990494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:11:59.992757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:11:59.992948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:11:59.993955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:11:59.994141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:11:59.994196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:11:59.994517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:11:59.994586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:11:59.994781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:11:59.994879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:11:59.997297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:11:59.997414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:11:59.997616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:11:59.997659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:11:59.998015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:11:59.998068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:11:59.998191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:11:59.998229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:11:59.998273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:11:59.998315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:11:59.998370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:00.000703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:00.000824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:00.000882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:00.000920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:00.003118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.003252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.003292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:00.003368Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:00.003491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.003623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
lanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.485014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:00.485079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TPropose operationId#107:0 HandleReply TEvOperationPlan at schemeshard: 72057594046678944, stepId: 5000007 2024-11-21T23:12:00.485199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 129 2024-11-21T23:12:00.485326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.485414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: Erasing txId 107 2024-11-21T23:12:00.491161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.491197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.491357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:12:00.491509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.491553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 1 2024-11-21T23:12:00.491596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 107, path id: 2 2024-11-21T23:12:00.491830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.491880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId#107:0 ProgressState at schemeshard: 72057594046678944 2024-11-21T23:12:00.491938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId#107:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T23:12:00.492275Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T23:12:00.492359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T23:12:00.492385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T23:12:00.492420Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T23:12:00.492454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:12:00.492737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T23:12:00.492781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 107 2024-11-21T23:12:00.492805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2024-11-21T23:12:00.492824Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T23:12:00.492859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:00.492907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2024-11-21T23:12:00.494743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 107:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T23:12:00.494831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 107, partId: 0, tablet: 72075186233409546 2024-11-21T23:12:00.495431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 107 2024-11-21T23:12:00.495468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:00.495560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 107 2024-11-21T23:12:00.495596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 130 2024-11-21T23:12:00.496365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T23:12:00.497019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2024-11-21T23:12:00.497375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.497506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.497566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId#107:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:00.497623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:12:00.497740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T23:12:00.497795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T23:12:00.497839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T23:12:00.497876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T23:12:00.497910Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T23:12:00.497937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T23:12:00.498041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:12:00.500306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:12:00.500765Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T23:12:00.503700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.504397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409546 2024-11-21T23:12:00.505025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:12:00.505071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:12:00.505130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.507557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T23:12:00.507638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T23:12:00.507810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2024-11-21T23:12:00.508169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2024-11-21T23:12:00.508238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2024-11-21T23:12:00.508827Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2024-11-21T23:12:00.508949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T23:12:00.509006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:652:2630] TestWaitNotification: OK eventTxId 107 2024-11-21T23:12:00.509682Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:12:00.509928Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 235us result status StatusPathDoesNotExist 2024-11-21T23:12:00.510101Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:12:00.510890Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2024-11-21T23:12:00.510994Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 99us result status StatusPathDoesNotExist 2024-11-21T23:12:00.511092Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpRm::SnapshotSharingByExchanger [GOOD] >> TOlap::CreateStore [GOOD] >> TOlap::CreateTableTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1392:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1395:2057] recipient: [4:1394:3416] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:1396:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:1397:3417] sender: [4:1398:2057] recipient: [4:1394:3416] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:1397:3417] Leader for TabletID 72057594037927937 is [4:1397:3417] sender: [4:1467:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1397:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1400:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:1401:2057] recipient: [5:1399:3421] Leader for TabletID 72057594037927937 is [5:1402:3422] sender: [5:1403:2057] recipient: [5:1399:3421] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:1402:3422] Leader for TabletID 72057594037927937 is [5:1402:3422] sender: [5:1472:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1397:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1400:2057] recipient: [6:1399:3421] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:1401:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:1402:3422] sender: [6:1403:2057] recipient: [6:1399:3421] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:1402:3422] Leader for TabletID 72057594037927937 is [6:1402:3422] sender: [6:1472:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1400:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1403:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:1404:2057] recipient: [7:1402:3423] Leader for TabletID 72057594037927937 is [7:1405:3424] sender: [7:1406:2057] recipient: [7:1402:3423] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:1405:3424] Leader for TabletID 72057594037927937 is [7:1405:3424] sender: [7:1475:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1402:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1405:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:1406:2057] recipient: [8:1404:3425] Leader for TabletID 72057594037927937 is [8:1407:3426] sender: [8:1408:2057] recipient: [8:1404:3425] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:1407:3426] Leader for TabletID 72057594037927937 is [8:1407:3426] sender: [8:1477:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1402:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1405:2057] recipient: [9:1404:3425] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:1406:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:1407:3426] sender: [9:1408:2057] recipient: [9:1404:3425] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:1407:3426] Leader for TabletID 72057594037927937 is [9:1407:3426] sender: [9:1477:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1405:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1407:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:1409:2057] recipient: [10:1408:3427] Leader for TabletID 72057594037927937 is [10:1410:3428] sender: [10:1411:2057] recipient: [10:1408:3427] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:1410:3428] Leader for TabletID 72057594037927937 is [10:1410:3428] sender: [10:1480:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:141:2057] recipient: [13:97:2132] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:144:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:145:2057] recipient: [13:143:2166] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:147:2057] recipient: [13:143:2166] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! new actor is[13:146:2167] Leader for TabletID 72057594037927937 is [13:146:2167] sender: [13:216:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:141:2057] recipient: [14:97:2132] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:144:2057] recipient: [14:143:2166] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:145:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:147:2057] recipient: [14:143:2166] !Reboot 72057594037927937 (actor [14:105:2137]) rebooted! !Reboot 72057594037927937 (actor [14:105:2137]) tablet resolver refreshed! new actor is[14:146:2167] Leader for TabletID 72057594037927937 is [14:146:2167] sender: [14:216:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:268:2057] recipient: [15:97:2132] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:271:2057] recipient: [15:270:2292] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:272:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:273:2293] sender: [15:274:2057] recipient: [15:270:2292] !Reboot 72057594037927937 (actor [15:105:2137]) rebooted! !Reboot 72057594037927937 (actor [15:105:2137]) tablet resolver refreshed! new actor is[15:273:2293] Leader for TabletID 72057594037927937 is [15:273:2293] sender: [15:343:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:273:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:276:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:277:2057] recipient: [16:275:2297] Leader for TabletID 72057594037927937 is [16:278:2298] sender: [16:279:2057] recipient: [16:275:2297] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:278:2298] Leader for TabletID 72057594037927937 is [16:278:2298] sender: [16:348:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:273:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:275:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:277:2057] recipient: [17:276:2297] Leader for TabletID 72057594037927937 is [17:278:2298] sender: [17:279:2057] recipient: [17:276:2297] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! new actor is[17:278:2298] Leader for TabletID 72057594037927937 is [17:278:2298] sender: [17:348:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:276:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:279:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:280:2057] recipient: [18:278:2299] Leader for TabletID 72057594037927937 is [18:281:2300] sender: [18:282:2057] recipient: [18:278:2299] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:281:2300] Leader for TabletID 72057594037927937 is [18:281:2300] sender: [18:329:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:278:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:281:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:282:2057] recipient: [19:280:2301] Leader for TabletID 72057594037927937 is [19:283:2302] sender: [19:284:2057] recipient: [19:280:2301] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:283:2302] Leader for TabletID 72057594037927937 is [19:283:2302] sender: [19:353:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:278:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:281:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:282:2057] recipient: [20:280:2301] Leader for TabletID 72057594037927937 is [20:283:2302] sender: [20:284:2057] recipient: [20:280:2301] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! new actor is[20:283:2302] Leader for TabletID 72057594037927937 is [20:283:2302] sender: [20:353:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:281:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:284:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:285:2057] recipient: [21:283:2303] Leader for TabletID 72057594037927937 is [21:286:2304] sender: [21:287:2057] recipient: [21:283:2303] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! 
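
The TKeyValueTest output here repeats one scenario: boot the tablet, inject a reboot on a specific event (`!Reboot ... on event <TEvent> !`), confirm the tablet resolver refreshes to the new actor, and move on to the next event type. A quick way to see which events were exercised, and how often, is to count those markers. The sketch below is an assumption-level helper over the plain-text log (hypothetical name, not part of the test framework), relying only on the `!Reboot ... on event ... !` wording seen above.

```python
import re
from collections import Counter

# Matches markers such as:
# !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest !
REBOOT_RE = re.compile(r"!Reboot \d+ \(actor \[[^\]]+\]\) on event (NKikimr::\w+::\w+) !")

def reboot_events(log_text: str) -> Counter:
    """Count how many tablet reboots were injected per triggering event type."""
    return Counter(REBOOT_RE.findall(log_text))

# e.g. Counter({'NKikimr::TEvTabletPipe::TEvServerConnected': ...,
#               'NKikimr::TEvKeyValue::TEvRequest': ..., ...}) for the excerpt above
```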
new actor is[21:286:2304] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] >> TOlap::CreateStoreWithDirs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:00.357192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:00.357288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.357325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:00.357377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:00.357439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:00.357466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:00.357524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.357819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:00.437506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:00.437564Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:00.448507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:00.452271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:00.452421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:00.459255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:00.459884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:00.460703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.460959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:00.465078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.466440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.466512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T23:12:00.466726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:00.466777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.466820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:00.466912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.473710Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:00.601743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:00.602033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.602267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:00.602538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:00.602612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.605203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.605331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:00.605538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.605615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:00.605670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:00.605714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:00.607664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.607724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:00.607761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:00.609470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.609524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.609569Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.609613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.613478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:00.615533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:00.615724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:00.616691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.616867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:00.616924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.617259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:00.617322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.617525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.617620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:00.619668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.619731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.619966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.620011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:00.620312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.620361Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:00.620485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:00.620523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
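
The schemeshard traces in these tests all walk the same operation state machine: `Change state for txid <op> A -> B` lines take a sub-operation from shard creation through propose, plan, and completion, interleaved with `TTxPublishToSchemeBoard` acknowledgements. A small sketch for recovering that path per operation from the raw text follows; it assumes plain-text input with unchanged message wording, and the function name is made up for illustration.

```python
import re
from collections import defaultdict

# Matches entries such as: Change state for txid 1:0 128 -> 240
STATE_RE = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

def state_paths(log_text: str) -> dict[str, list[tuple[int, int]]]:
    """Collect the ordered (from, to) state transitions for every operation id."""
    paths: dict[str, list[tuple[int, int]]] = defaultdict(list)
    for op_id, src, dst in STATE_RE.findall(log_text):
        paths[op_id].append((int(src), int(dst)))
    return dict(paths)

# Run over a single TOlap trace, txid 1:0 yields [(2, 3), (3, 128), (128, 240)],
# matching the AlterSubDomain propose/plan/done sequence visible in the log above.
```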
2024-11-21T23:12:00.620568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:00.620610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.620664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:00.620703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:00.620773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:00.620815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:00.620852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:00.623111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.623231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.623272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:00.623340Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:00.623383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.623506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ode 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2024-11-21T23:12:00.828702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.828825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:00.828869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId#101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2024-11-21T23:12:00.829013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2024-11-21T23:12:00.829125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.829183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2024-11-21T23:12:00.831197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.831232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.831344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:12:00.831460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.831492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:12:00.831547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T23:12:00.831882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.831934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId#101:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:12:00.831984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId#101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T23:12:00.832656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:00.832732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 
72057594046678944, cookie: 101 2024-11-21T23:12:00.832766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:12:00.832802Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:12:00.832836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:00.833837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:00.833917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:00.833942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:12:00.833965Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:12:00.833996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:12:00.834056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2024-11-21T23:12:00.834926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T23:12:00.836056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:12:00.837281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:12:00.849570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T23:12:00.849629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:00.849740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T23:12:00.849834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T23:12:00.850161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2024-11-21T23:12:00.850205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:00.850335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 
2024-11-21T23:12:00.856380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.856990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.857154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.857221Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:12:00.857359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:12:00.857402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:12:00.857456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2024-11-21T23:12:00.857544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 101 2024-11-21T23:12:00.857598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:12:00.857653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:12:00.857696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:12:00.857851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:12:00.860000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:12:00.860078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:364:2344] TestWaitNotification: OK eventTxId 101 2024-11-21T23:12:00.860719Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:12:00.861024Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 318us result status StatusSuccess 2024-11-21T23:12:00.861787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Decimal(35,9)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:00.183793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:00.183894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.183933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:00.183982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:00.184034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:00.184071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:00.184121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.184399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:00.256704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:00.256753Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:00.268926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:00.272931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:00.273113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:00.279029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:00.279565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:00.280235Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.280521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:00.284472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.285826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.285886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.286098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:00.286143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.286182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:00.286266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.291958Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:00.409772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:00.409955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.410083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:00.410230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:00.410273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.412836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.413010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:00.413180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.413251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:00.413316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:00.413351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:00.415100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.415153Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:00.415188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:00.416640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.416702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.416736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.416769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.419492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:00.420916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:00.421073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:00.421727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.421889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:00.421952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.422156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:00.422207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.422356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.422454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:00.424215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.424278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.424418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.424455Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:00.424718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.424758Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:00.424849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:00.424880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.424917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:00.424957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.425008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:00.425037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:00.425088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:00.425124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:00.425152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:00.426922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.427032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.427071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:00.427141Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:00.427177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.427278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ard: 72057594046678944 2024-11-21T23:12:01.015392Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 111:2 ProgressState 2024-11-21T23:12:01.015515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#111:2 progress is 3/3 2024-11-21T23:12:01.015553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2024-11-21T23:12:01.015602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 111, ready parts: 3/3, is published: true 2024-11-21T23:12:01.015671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 111 2024-11-21T23:12:01.015744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 111 ready parts: 3/3 2024-11-21T23:12:01.015806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:0 2024-11-21T23:12:01.015844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:0 2024-11-21T23:12:01.015917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2024-11-21T23:12:01.015972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:1 2024-11-21T23:12:01.016002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:1 2024-11-21T23:12:01.016037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T23:12:01.016076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 111:2 2024-11-21T23:12:01.016098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 111:2 2024-11-21T23:12:01.016181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2024-11-21T23:12:01.017813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2024-11-21T23:12:01.017864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:621:2599] TestWaitNotification: OK eventTxId 111 2024-11-21T23:12:01.018598Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/DirA/DirB/NestedTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:12:01.018891Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/DirA/DirB/NestedTable" took 322us result status StatusSuccess 2024-11-21T23:12:01.019395Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/DirA/DirB/NestedTable" PathDescription { Self { Name: "NestedTable" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 111 CreateStep: 5000006 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "NestedTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 112 2024-11-21T23:12:01.023070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore/MyDir" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TableWithTiers" Schema { Columns { Name: "timestamp" Type: "Timestamp" } Columns { Name: "data" Type: "Utf8" } KeyColumnNames: "timestamp" } ColumnShardCount: 1 } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:01.023349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/MyDir/TableWithTiers, opId: 112:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.023630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: MyDir, child name: TableWithTiers, child id: [OwnerId: 72057594046678944, LocalPathId: 9], at schemeshard: 72057594046678944 2024-11-21T23:12:01.023679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2024-11-21T23:12:01.023839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 1 2024-11-21T23:12:01.024011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 112:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:01.024042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 112:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.024123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 
2024-11-21T23:12:01.024166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2024-11-21T23:12:01.027008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 112, response: Status: StatusAccepted TxId: 112 SchemeshardId: 72057594046678944 PathId: 9, at schemeshard: 72057594046678944 2024-11-21T23:12:01.027180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 112, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/MyDir/ 2024-11-21T23:12:01.027454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.027515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:12:01.027665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2024-11-21T23:12:01.027749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.027803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 3 2024-11-21T23:12:01.027853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 9 2024-11-21T23:12:01.028186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.028255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId#112:0 ProgressState at tabletId# 72057594046678944 2024-11-21T23:12:01.028424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId#112:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2024-11-21T23:12:01.029081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:12:01.029197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:12:01.029235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:12:01.029273Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2024-11-21T23:12:01.029332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:12:01.029980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:12:01.030071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 1 PathOwnerId: 72057594046678944, cookie: 
112 2024-11-21T23:12:01.030098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:12:01.030141Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 1 2024-11-21T23:12:01.030170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2024-11-21T23:12:01.030238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T23:12:01.033031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2024-11-21T23:12:01.033186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 0, tablet: 72075186233409546 2024-11-21T23:12:01.034805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T23:12:01.034935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 TestModificationResult got TxId: 112, wait until txId: 112 >> TOlap::CustomDefaultPresets [GOOD] >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2024-11-21T23:11:58.330275Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:11:58.330809Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001bd5/r3tmp/tmpU33akj/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:11:58.331583Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001bd5/r3tmp/tmpU33akj/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001bd5/r3tmp/tmpU33akj/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 849733661544646894 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:11:58.368506Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:58.368775Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2024-11-21T23:11:58.386576Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:449:2098] with ResourceBroker at [2:420:2097] 2024-11-21T23:11:58.386722Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:450:2099] 2024-11-21T23:11:58.386930Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:448:2330] with ResourceBroker at [1:419:2311] 2024-11-21T23:11:58.387012Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:451:2331] 2024-11-21T23:11:58.387161Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:58.387196Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2024-11-21T23:11:58.387230Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2024-11-21T23:11:58.387260Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2024-11-21T23:11:58.387440Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.405393Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.405638Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.405726Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.406071Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:58.406233Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2024-11-21T23:11:58.406334Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:58.406366Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.406486Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.406649Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2024-11-21T23:11:58.406680Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:58.406738Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230718 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:11:58.407291Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2024-11-21T23:11:58.407410Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.407841Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.408125Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:58.408414Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2024-11-21T23:11:58.408591Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2024-11-21T23:11:58.408740Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:11:58.408907Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:11:59.372908Z node 1 :KQP_RESOURCE_MANAGER DEBUG: 
Schedule Snapshot request 2024-11-21T23:11:59.372988Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2024-11-21T23:11:59.373094Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:59.373144Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:59.373191Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:59.373224Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:59.373259Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:448:2330])) 2024-11-21T23:11:59.373394Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:59.373492Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:448:2330]) priority=0 resources={0, 100} 2024-11-21T23:11:59.373530Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:59.373559Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:448:2330]) from queue queue_kqp_resource_manager 2024-11-21T23:11:59.373585Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:448:2330]) to queue queue_kqp_resource_manager 2024-11-21T23:11:59.373613Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:448:2330])) 2024-11-21T23:11:59.373682Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:11:59.373746Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:11:59.373854Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230719 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2024-11-21T23:11:59.374088Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:12:00.116119Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2024-11-21T23:12:00.116283Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [2:449:2098]) priority=0 resources={0, 100} 2024-11-21T23:12:00.116330Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [2:449:2098]) to queue queue_kqp_resource_manager 2024-11-21T23:12:00.116400Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:449:2098]) from queue queue_kqp_resource_manager 2024-11-21T23:12:00.116438Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [2:449:2098]) to queue queue_kqp_resource_manager 2024-11-21T23:12:00.116501Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:449:2098])) 2024-11-21T23:12:00.116619Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:12:00.116698Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-2-2 (2 by [2:449:2098]) priority=0 resources={0, 100} 2024-11-21T23:12:00.116749Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-2-2 (2 by [2:449:2098]) to queue queue_kqp_resource_manager 2024-11-21T23:12:00.116790Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:449:2098]) from queue queue_kqp_resource_manager 2024-11-21T23:12:00.116829Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-2-2 (2 by [2:449:2098]) to queue queue_kqp_resource_manager 2024-11-21T23:12:00.116868Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:449:2098])) 2024-11-21T23:12:00.116959Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2024-11-21T23:12:00.117050Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:12:00.117189Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230720 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2024-11-21T23:12:00.117532Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:12:00.379613Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2024-11-21T23:12:00.379751Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:448:2330]) (release resources {0, 100}) 2024-11-21T23:12:00.379840Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350100 (remove task kqp-1-1-1 (1 by [1:448:2330])) 2024-11-21T23:12:00.379899Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200200 2024-11-21T23:12:00.379970Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2024-11-21T23:12:00.380032Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:448:2330]) (release resources {0, 100}) 2024-11-21T23:12:00.380078Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350100 to 0.200200 (remove task kqp-2-1-2 (2 by [1:448:2330])) 2024-11-21T23:12:00.380118Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2024-11-21T23:12:00.380180Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:12:00.380335Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732230721 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:12:00.380669Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2024-11-21T23:12:00.657567Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2024-11-21T23:12:00.657713Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [2:449:2098]) (release resources {0, 100}) 2024-11-21T23:12:00.657792Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:449:2098])) 2024-11-21T23:12:00.657836Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2024-11-21T23:12:00.657885Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2024-11-21T23:12:00.657936Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-2-2-2 (2 by [2:449:2098]) (release resources {0, 100}) 2024-11-21T23:12:00.657970Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:449:2098])) 2024-11-21T23:12:00.658016Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2024-11-21T23:12:00.658110Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2024-11-21T23:12:00.658250Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1732230722 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2024-11-21T23:12:00.660832Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2024-11-21T23:12:00.936683Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |83.0%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:00.592527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:00.592623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.592672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:00.592720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:00.592769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:00.592798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:00.592852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.593134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:00.654814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:00.654871Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:00.665227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:00.669358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:00.669533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:00.675708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:00.676247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners 
number: 0 2024-11-21T23:12:00.676793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.677046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:00.682538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.683723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.683793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.683986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:00.684028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.684064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:00.684144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.689108Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:00.799560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:00.799800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.800002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:00.800195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:00.800256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.802578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.802682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:00.802861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.802932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:00.802976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:00.803012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:00.806798Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.806864Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:00.806904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:00.810889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.810961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.811023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.811082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.815047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:00.820234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:00.820458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:00.821427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.821590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:00.821643Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.821929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:00.821985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.822166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.822248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:00.829390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.829468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.829643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2024-11-21T23:12:00.829688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:00.829989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.830041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:00.830162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:00.830202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.830258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:00.830303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.830361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:00.830414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:00.830488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:00.830538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:00.830574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:00.832573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.832739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.832785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:00.832858Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:00.832908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.833015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
indRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:01.294506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 104 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T23:12:01.296486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.296571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.296678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.296724Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T23:12:01.296821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T23:12:01.296855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:12:01.296903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T23:12:01.296973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 104 2024-11-21T23:12:01.297022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:12:01.297059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T23:12:01.297089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T23:12:01.297188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T23:12:01.298779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:12:01.298823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:570:2548] TestWaitNotification: OK eventTxId 104 2024-11-21T23:12:01.299414Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:12:01.299642Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/Table3" took 249us result status StatusSuccess 2024-11-21T23:12:01.300068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/Table3" PathDescription { Self { Name: "Table3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 104 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Version: 1 UseTiering: "Tiering1" } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 105 2024-11-21T23:12:01.303063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { UseTiering: "Tiering1" } ColumnShardCount: 1 } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:01.303305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.303549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T23:12:01.303604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 0 2024-11-21T23:12:01.303750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T23:12:01.303954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:01.304001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.304090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2024-11-21T23:12:01.304143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 
2024-11-21T23:12:01.306089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusAccepted TxId: 105 SchemeshardId: 72057594046678944 PathId: 6, at schemeshard: 72057594046678944 2024-11-21T23:12:01.306201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2024-11-21T23:12:01.306373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.306441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:12:01.306668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T23:12:01.306773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.306810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 2 2024-11-21T23:12:01.306848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 105, path id: 6 2024-11-21T23:12:01.307193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.307266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId#105:0 ProgressState at tabletId# 72057594046678944 2024-11-21T23:12:01.307409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId#105:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2024-11-21T23:12:01.308084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:12:01.308196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:12:01.308233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T23:12:01.308280Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2024-11-21T23:12:01.308311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2024-11-21T23:12:01.308938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:12:01.309006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 1 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:12:01.309030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T23:12:01.309056Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 1 2024-11-21T23:12:01.309079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T23:12:01.309155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T23:12:01.311042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2024-11-21T23:12:01.311144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 0, tablet: 72075186233409546 2024-11-21T23:12:01.312486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T23:12:01.312695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateStoreWithDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:00.917154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:00.917252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.917292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:00.917353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:00.917422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:00.917455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:00.917513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.917814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:00.984919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:00.984970Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:00.995911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2024-11-21T23:12:01.000200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:01.000404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:01.012349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:01.012956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:01.013536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.013758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:01.020247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.021590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.021654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.021852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:01.021901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:01.021939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:01.022036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.028188Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:01.134764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:01.134976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.135176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:01.135399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:01.135459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.137436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.137537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:01.137699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T23:12:01.137763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:01.137811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:01.137847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:01.139467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.139517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:01.139569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:01.141052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.141105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.141150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.141189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.154680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:01.156984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:01.157174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:01.158034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.158196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:01.158262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.158582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:01.158637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.158803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:01.158893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2024-11-21T23:12:01.161062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.161120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:01.161321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.161364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:01.161695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.161752Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:01.161859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:01.161896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.161938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:01.161978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.162036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:01.162072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:01.162146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:01.162190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:01.162224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:01.164156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:01.164270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:01.164307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:01.164383Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:01.164433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:01.164545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
01.360644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: false 2024-11-21T23:12:01.361101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:01.361163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:01.361196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:12:01.361226Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:12:01.361265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:01.362519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:01.362596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:01.362623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:12:01.362645Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T23:12:01.362665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:12:01.362997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:01.363070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:01.363100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:12:01.363116Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2024-11-21T23:12:01.363133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:12:01.363675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:01.363731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:12:01.363756Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:12:01.363788Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T23:12:01.363805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:12:01.363845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2024-11-21T23:12:01.364734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:2 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T23:12:01.365196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:12:01.366477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:12:01.366526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:12:01.366576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:12:01.389043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T23:12:01.389095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T23:12:01.389220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T23:12:01.389313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 101 MinStep: 0 Step: 5000002 2024-11-21T23:12:01.389627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2024-11-21T23:12:01.389686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2024-11-21T23:12:01.389809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:12:01.392450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:12:01.392828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:12:01.392958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2024-11-21T23:12:01.393017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2024-11-21T23:12:01.393164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 3/3 2024-11-21T23:12:01.393210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 101 ready parts: 3/3 2024-11-21T23:12:01.393267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2024-11-21T23:12:01.393374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:373:2353] message: TxId: 101 2024-11-21T23:12:01.393420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2024-11-21T23:12:01.393524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:12:01.393557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:12:01.393640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:12:01.393679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2024-11-21T23:12:01.393699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2024-11-21T23:12:01.393726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:12:01.393750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2024-11-21T23:12:01.393770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2024-11-21T23:12:01.393863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:12:01.395800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:12:01.395877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:374:2354] TestWaitNotification: OK eventTxId 101 2024-11-21T23:12:01.396466Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/DirB/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:12:01.396762Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/DirB/OlapStore" took 268us result status StatusSuccess 2024-11-21T23:12:01.397414Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/DirB/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:00.801308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:00.801389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.801424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:00.801472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:00.801534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:00.801568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:00.801606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.801883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:00.867396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:00.867452Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:00.889099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:00.893012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:00.893196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:00.903470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:00.904130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 
2024-11-21T23:12:00.904749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.905017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:00.909958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.911391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.911466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.911683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:00.911732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.911788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:00.911875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.919707Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:01.047974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:01.048188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.048384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:01.048593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:01.048654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.050851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.050987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:01.051146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.051214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:01.051261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:01.051298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:01.053028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.053073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:01.053108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:01.054772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.054818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.054863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.054907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.064921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:01.066826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:01.067009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:01.068011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.068163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:01.068206Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.068450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:01.068504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.068655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:01.068749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:01.070617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.070668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:01.070838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T23:12:01.070874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:01.071153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.071194Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:01.071293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:01.071327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.071367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:01.071404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.071457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:01.071486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:01.071543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:01.071597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:01.071627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:01.073408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:01.073498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:01.073528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:01.073585Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:01.073628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:01.073711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
n: 72057594037968897 2024-11-21T23:12:01.327639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 102:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T23:12:01.327722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 102:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T23:12:01.327795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 2 -> 3 2024-11-21T23:12:01.329440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:12:01.329552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:12:01.331628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.331951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.332003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TConfigureParts operationId#102:0 ProgressState at tabletId# 72057594046678944 2024-11-21T23:12:01.332106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TConfigureParts operationId#102:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409547 2024-11-21T23:12:01.340401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382272 2024-11-21T23:12:01.340545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72075186233409547 2024-11-21T23:12:01.367427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:01.367533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:01.367725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:01.367862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:01.367980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:01.368099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:01.368177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2024-11-21T23:12:01.368244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:01.368305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:01.368403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:01.368506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:01.368566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[1:429:2405];tablet_id=72075186233409547;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:01.372951Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:12:01.373169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:12:01.373227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:12:01.373271Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:12:01.373372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:12:01.373415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:12:01.373437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:12:01.373580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:12:01.373619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:12:01.373639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:12:01.373722Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:12:01.373753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:12:01.373775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:12:01.373829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:12:01.373857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:12:01.373897Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:12:01.373928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:12:01.373953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:12:01.373971Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:12:01.374255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:12:01.374331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:12:01.374366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:12:01.374588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:12:01.374618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:12:01.374644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:12:01.374743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:12:01.374771Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:12:01.374788Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:12:01.374911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:12:01.374938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:12:01.374956Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:12:01.375040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:12:01.375071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; TestModificationResult got TxId: 102, wait until txId: 102 >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo >> TOlap::AlterTtl [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:01.107576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:01.107656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:01.107700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:01.107749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:01.107830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:01.107857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:01.107911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:01.108218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:01.180683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:01.180731Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:01.192373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:01.195900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:01.196084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:01.202264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:01.202835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:01.203509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.203801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:01.207980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.209206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.209276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.209498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:01.209544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:01.209583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:01.209661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.215993Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:01.338933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:01.339190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.339389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:01.339585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:01.339643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.341924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.342040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:01.342226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.342296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:01.342342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:01.342379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:01.344160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.344212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:01.344262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:01.345840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.345881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.345924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.345966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.349749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:01.351552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:01.351755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:01.352625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.352777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:01.352823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.353076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:01.353124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.353285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:01.353388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:01.355096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.355146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:01.355312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.355346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:01.355636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.355673Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:01.355763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:01.355806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.355844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:01.355877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.355925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:01.355952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:01.356001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:01.356036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:01.356083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:01.357782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:01.357873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:01.357917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:01.357971Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:01.358003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:01.358126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... fCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:12:01.614044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:12:01.616391Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.616441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:12:01.616643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:12:01.616756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.616787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:12:01.616853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T23:12:01.617105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.617166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId#102:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:12:01.617222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId#102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T23:12:01.617900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:12:01.618001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:12:01.618045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:12:01.618082Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T23:12:01.618123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:12:01.619116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:12:01.619200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:12:01.619231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:12:01.619259Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T23:12:01.619320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:12:01.619390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T23:12:01.620973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T23:12:01.621068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639 2024-11-21T23:12:01.621133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2024-11-21T23:12:01.621945Z node 1 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2024-11-21T23:12:01.622084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.622193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2024-11-21T23:12:01.623176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:12:01.625246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:12:01.631268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.643735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2024-11-21T23:12:01.643801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:01.643908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 
2024-11-21T23:12:01.643972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 102 MinStep: 0 Step: 5000003 2024-11-21T23:12:01.644283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2024-11-21T23:12:01.644350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:01.644491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:12:01.649336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.649851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.650084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.650135Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:12:01.650241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:12:01.650269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:12:01.650314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:12:01.650452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2343] message: TxId: 102 2024-11-21T23:12:01.650500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:12:01.650544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:12:01.650577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:12:01.650711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:12:01.661367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:12:01.661493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:418:2397] TestWaitNotification: OK eventTxId 102 2024-11-21T23:12:01.662187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:12:01.662826Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 638us result status StatusSuccess 2024-11-21T23:12:01.663414Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |83.0%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:00.158804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:00.158902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.158956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:00.158999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:00.159044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:00.159074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:00.159134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.159442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:00.234517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:00.234591Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:00.246589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:00.251614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:00.251857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:00.258047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:00.258670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:00.259305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.259564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: 
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:00.264595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.265888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.265957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.266173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:00.266218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.266257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:00.266343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.273270Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:00.400316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:00.400593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.400780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:00.400987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:00.401052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.403058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.403178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:00.403347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.403434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:00.403477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:00.403511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:00.405127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.405177Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 
2024-11-21T23:12:00.405213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:00.406624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.406659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.406695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.406727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.415935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:00.419284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:00.419484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:00.420569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:00.420746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:00.420793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.421103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:00.421159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:00.421338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.421427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:00.423475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:00.423532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:00.423686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:00.423713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:00.424026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:00.424070Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:00.424155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:00.424188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.424253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:00.424329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:00.424368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:00.424402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:00.424477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:00.424523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:00.424555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:00.426128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.426218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:00.426255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:00.426331Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:00.426366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:00.426485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
rd: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2024-11-21T23:12:01.974906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:01.975065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2024-11-21T23:12:01.975123Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T23:12:01.975194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.975238Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2024-11-21T23:12:01.977323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.977462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.977507Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#105:0 HandleReply ProgressState at tablet: 72057594046678944 2024-11-21T23:12:01.977582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2024-11-21T23:12:01.977718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:01.979432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2024-11-21T23:12:01.979591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 105 at step: 5000006 2024-11-21T23:12:01.980311Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.980429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:01.980490Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId#105:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000006 2024-11-21T23:12:01.981083Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2024-11-21T23:12:01.981306Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:12:01.981373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:12:01.982264Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186233409546;tx_state=execute;fline=manager.cpp:215;path_id=3;tiering_name=Tiering1;event=not_found; 2024-11-21T23:12:01.982368Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186233409546;tx_state=execute;fline=manager.cpp:215;path_id=3;tiering_name=Tiering1;event=not_found; FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006 2024-11-21T23:12:01.984545Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.984601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:12:01.984818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:12:01.984976Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.985024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 2 2024-11-21T23:12:01.985075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2024-11-21T23:12:01.985434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.985493Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId#105:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:12:01.985567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId#105:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2024-11-21T23:12:01.986263Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:12:01.986361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:12:01.986428Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T23:12:01.986497Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2024-11-21T23:12:01.986548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:12:01.987091Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:12:01.987175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2024-11-21T23:12:01.987207Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2024-11-21T23:12:01.987235Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2024-11-21T23:12:01.987260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:12:01.987323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2024-11-21T23:12:01.989466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2024-11-21T23:12:01.990712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T23:12:01.990821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2024-11-21T23:12:02.013630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T23:12:02.013699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:02.013819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T23:12:02.013889Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# Status: SUCCESS TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 105 MinStep: 0 Step: 5000006 2024-11-21T23:12:02.014291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 105 2024-11-21T23:12:02.014338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409546, partId: 0 2024-11-21T23:12:02.014496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 105 FAKE_COORDINATOR: Erasing txId 105 2024-11-21T23:12:02.017620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:02.017985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:02.018111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2024-11-21T23:12:02.018153Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2024-11-21T23:12:02.018296Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2024-11-21T23:12:02.018336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:12:02.018424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 
1/1, is published: true 2024-11-21T23:12:02.018508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:334:2314] message: TxId: 105 2024-11-21T23:12:02.018563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2024-11-21T23:12:02.018608Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2024-11-21T23:12:02.018653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2024-11-21T23:12:02.018794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:12:02.020451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2024-11-21T23:12:02.020512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:512:2490] TestWaitNotification: OK eventTxId 105 >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] |83.0%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb >> TxUsage::WriteToTopic_Demo_39 [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:476:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:479:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:480:2057] recipient: [4:478:2500] Leader for TabletID 72057594037927937 is [4:481:2501] sender: [4:482:2057] recipient: [4:478:2500] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:481:2501] Leader for TabletID 72057594037927937 is [4:481:2501] sender: [4:551:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:481:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:483:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:485:2057] recipient: [5:484:2505] Leader for TabletID 72057594037927937 is [5:486:2506] sender: [5:487:2057] recipient: [5:484:2505] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:486:2506] Leader for TabletID 72057594037927937 is [5:486:2506] sender: [5:556:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:481:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:483:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:485:2057] recipient: [6:484:2505] Leader for TabletID 72057594037927937 is [6:486:2506] sender: [6:487:2057] recipient: [6:484:2505] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:486:2506] Leader for TabletID 72057594037927937 is [6:486:2506] sender: [6:556:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:482:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:485:2057] recipient: [7:484:2505] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:486:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:487:2506] sender: [7:488:2057] recipient: [7:484:2505] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:487:2506] Leader for TabletID 72057594037927937 is [7:487:2506] sender: [7:557:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:484:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:487:2057] recipient: [8:486:2507] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:488:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:489:2508] sender: [8:490:2057] recipient: [8:486:2507] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:489:2508] Leader for TabletID 72057594037927937 is [8:489:2508] sender: [8:559:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:484:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:487:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:488:2057] recipient: [9:486:2507] Leader for TabletID 72057594037927937 is [9:489:2508] sender: [9:490:2057] recipient: [9:486:2507] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:489:2508] Leader for TabletID 72057594037927937 is [9:489:2508] sender: [9:559:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:485:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:488:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:489:2057] recipient: [10:487:2507] Leader for TabletID 72057594037927937 is [10:490:2508] sender: [10:491:2057] recipient: [10:487:2507] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:490:2508] Leader for TabletID 72057594037927937 is [10:490:2508] sender: [10:560:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:487:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:489:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:491:2057] recipient: [11:490:2509] Leader for TabletID 72057594037927937 is [11:492:2510] sender: [11:493:2057] recipient: [11:490:2509] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:492:2510] Leader for TabletID 72057594037927937 is [11:492:2510] sender: [11:562:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 927937 is [13:105:2137] sender: [13:492:2057] recipient: [13:490:2509] Leader for TabletID 72057594037927937 is [13:493:2510] sender: [13:494:2057] recipient: [13:490:2509] !Reboot 72057594037927937 (actor [13:105:2137]) rebooted! !Reboot 72057594037927937 (actor [13:105:2137]) tablet resolver refreshed! 
new actor is[13:493:2510] Leader for TabletID 72057594037927937 is [13:493:2510] sender: [13:563:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:101:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:106:2057] recipient: [14:99:2133] Leader for TabletID 72057594037927937 is [14:105:2137] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:101:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:106:2057] recipient: [15:99:2133] Leader for TabletID 72057594037927937 is [15:105:2137] sender: [15:139:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:101:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:106:2057] recipient: [16:99:2133] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:139:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:141:2057] recipient: [16:97:2132] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:144:2057] recipient: [16:143:2166] Leader for TabletID 72057594037927937 is [16:105:2137] sender: [16:145:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:147:2057] recipient: [16:143:2166] !Reboot 72057594037927937 (actor [16:105:2137]) rebooted! !Reboot 72057594037927937 (actor [16:105:2137]) tablet resolver refreshed! new actor is[16:146:2167] Leader for TabletID 72057594037927937 is [16:146:2167] sender: [16:216:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:101:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:106:2057] recipient: [17:99:2133] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:139:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:141:2057] recipient: [17:97:2132] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:144:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:105:2137] sender: [17:145:2057] recipient: [17:143:2166] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:147:2057] recipient: [17:143:2166] !Reboot 72057594037927937 (actor [17:105:2137]) rebooted! !Reboot 72057594037927937 (actor [17:105:2137]) tablet resolver refreshed! 
new actor is[17:146:2167] Leader for TabletID 72057594037927937 is [17:146:2167] sender: [17:216:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:101:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:106:2057] recipient: [18:99:2133] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:139:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:476:2057] recipient: [18:97:2132] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:479:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:105:2137] sender: [18:480:2057] recipient: [18:478:2500] Leader for TabletID 72057594037927937 is [18:481:2501] sender: [18:482:2057] recipient: [18:478:2500] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:481:2501] Leader for TabletID 72057594037927937 is [18:481:2501] sender: [18:551:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:481:2057] recipient: [19:97:2132] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:484:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:485:2057] recipient: [19:483:2505] Leader for TabletID 72057594037927937 is [19:486:2506] sender: [19:487:2057] recipient: [19:483:2505] !Reboot 72057594037927937 (actor [19:105:2137]) rebooted! !Reboot 72057594037927937 (actor [19:105:2137]) tablet resolver refreshed! new actor is[19:486:2506] Leader for TabletID 72057594037927937 is [19:486:2506] sender: [19:556:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:481:2057] recipient: [20:97:2132] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:484:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:485:2057] recipient: [20:483:2505] Leader for TabletID 72057594037927937 is [20:486:2506] sender: [20:487:2057] recipient: [20:483:2505] !Reboot 72057594037927937 (actor [20:105:2137]) rebooted! !Reboot 72057594037927937 (actor [20:105:2137]) tablet resolver refreshed! 
new actor is[20:486:2506] Leader for TabletID 72057594037927937 is [20:486:2506] sender: [20:556:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:482:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:485:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:486:2057] recipient: [21:484:2505] Leader for TabletID 72057594037927937 is [21:487:2506] sender: [21:488:2057] recipient: [21:484:2505] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:487:2506] Leader for TabletID 72057594037927937 is [21:487:2506] sender: [21:535:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:484:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:487:2057] recipient: [22:486:2507] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:488:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:489:2508] sender: [22:490:2057] recipient: [22:486:2507] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! !Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:489:2508] Leader for TabletID 72057594037927937 is [22:489:2508] sender: [22:559:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:484:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:487:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:488:2057] recipient: [23:486:2507] Leader for TabletID 72057594037927937 is [23:489:2508] sender: [23:490:2057] recipient: [23:486:2507] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! 
new actor is[23:489:2508] Leader for TabletID 72057594037927937 is [23:489:2508] sender: [23:559:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:485:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:487:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:489:2057] recipient: [24:488:2507] Leader for TabletID 72057594037927937 is [24:490:2508] sender: [24:491:2057] recipient: [24:488:2507] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:490:2508] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. 
Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb >> TTxLocatorTest::Boot |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TTxLocatorTest::TestAllocateAll >> TTxLocatorTest::TestImposibleSize >> TTxLocatorTest::TestWithReboot >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb >> TTxLocatorTest::TestZeroRange >> TxUsage::WriteToTopic_Demo_40 >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb >> TTxLocatorTest::TestAllocateAll [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] >> TTxLocatorTest::TestImposibleSize [GOOD] >> TTxLocatorTest::Boot [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TTxLocatorTest::TestZeroRange [GOOD] >> TTxLocatorTest::TestWithReboot [GOOD] |83.1%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2024-11-21T23:12:03.899927Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:12:03.900341Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:12:03.901127Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:12:03.902678Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.903145Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:12:03.912582Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.912650Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.912681Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.912729Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:12:03.912859Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.912943Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:12:03.913037Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:12:03.913528Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#281474976710655 2024-11-21T23:12:03.913998Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.914074Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.914147Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2024-11-21T23:12:03.914180Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2024-11-21T23:12:03.916953Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2107] requested range size#1 2024-11-21T23:12:03.917093Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2024-11-21T23:12:03.917129Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:73:2107] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2024-11-21T23:12:03.904959Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:12:03.905208Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:12:03.905727Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:12:03.906788Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.907113Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:12:03.913729Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.913824Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.913871Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.913934Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:12:03.914093Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.914180Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:12:03.914301Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:12:03.914867Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#281474976710656 2024-11-21T23:12:03.915002Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2024-11-21T23:12:03.918423Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2024-11-21T23:12:03.918805Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2106] requested range size#123456 2024-11-21T23:12:03.919324Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.919379Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.919465Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2024-11-21T23:12:03.919498Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2106] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2024-11-21T23:12:03.919929Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2110] requested range size#281474976587200 2024-11-21T23:12:03.920035Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2024-11-21T23:12:03.920081Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:76:2110] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2024-11-21T23:12:03.920422Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2113] requested range size#246912 2024-11-21T23:12:03.920760Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.920809Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.920888Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2024-11-21T23:12:03.920920Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2113] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2024-11-21T23:12:03.921255Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#281474976340288 2024-11-21T23:12:03.921347Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2024-11-21T23:12:03.921398Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2024-11-21T23:12:03.901332Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:12:03.901698Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:12:03.902442Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:12:03.903615Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.903898Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:12:03.911319Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.911393Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.911428Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.911484Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:12:03.911599Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.911666Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:12:03.911754Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2024-11-21T23:12:03.924230Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:12:03.924516Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:12:03.924995Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:12:03.926028Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.926344Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:12:03.934506Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.934623Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.934681Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.934761Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:12:03.934931Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.935040Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:12:03.935170Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:12:03.936110Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2024-11-21T23:12:03.936519Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2024-11-21T23:12:03.936708Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2024-11-21T23:12:03.936957Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2024-11-21T23:12:03.937125Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#100000 2024-11-21T23:12:03.937315Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.937399Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2024-11-21T23:12:03.937515Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.937641Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.937729Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.937794Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2024-11-21T23:12:03.937890Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.937939Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2024-11-21T23:12:03.938054Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.938119Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2024-11-21T23:12:03.938230Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.938301Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.938371Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.938449Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2024-11-21T23:12:03.938671Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.938795Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 
2024-11-21T23:12:03.938833Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2024-11-21T23:12:03.938966Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.939044Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2024-11-21T23:12:03.939076Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2024-11-21T23:12:03.939198Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2024-11-21T23:12:03.939226Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2024-11-21T23:12:03.939365Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.939426Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.939476Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2024-11-21T23:12:03.939513Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2024-11-21T23:12:03.939620Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.939694Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2024-11-21T23:12:03.939721Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 400000 to# 500000 2024-11-21T23:12:03.939849Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.939916Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.939958Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2024-11-21T23:12:03.939985Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2024-11-21T23:12:03.940041Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2024-11-21T23:12:03.940056Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2024-11-21T23:12:03.940117Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.940145Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.940183Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2024-11-21T23:12:03.940210Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2024-11-21T23:12:03.940306Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.940327Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2024-11-21T23:12:03.940343Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2024-11-21T23:12:03.940399Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.940439Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2024-11-21T23:12:03.940459Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T23:12:03.945170Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#100000 2024-11-21T23:12:03.945597Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#100000 2024-11-21T23:12:03.945696Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:113:2147] requested range size#100000 2024-11-21T23:12:03.945787Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.945850Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.946060Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#100000 2024-11-21T23:12:03.946224Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:117:2151] requested range size#100000 2024-11-21T23:12:03.946369Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.946445Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.946530Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:15:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:03.946629Z node 1 :TX_ALLOCATOR DEBUG: tablet# ... 
from# 8200000 Reserved to# 8300000 2024-11-21T23:12:03.999962Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:393:2427] TEvAllocateResult from# 8200000 to# 8300000 2024-11-21T23:12:04.000054Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.000168Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2024-11-21T23:12:04.000189Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:395:2429] TEvAllocateResult from# 8300000 to# 8400000 2024-11-21T23:12:04.000252Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.000311Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2024-11-21T23:12:04.000325Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:397:2431] TEvAllocateResult from# 8400000 to# 8500000 2024-11-21T23:12:04.000370Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.000404Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2024-11-21T23:12:04.000418Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:399:2433] TEvAllocateResult from# 8500000 to# 8600000 2024-11-21T23:12:04.000483Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2024-11-21T23:12:04.000498Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:401:2435] TEvAllocateResult from# 8600000 to# 8700000 2024-11-21T23:12:04.000549Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.000648Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.000694Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2024-11-21T23:12:04.000715Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:403:2437] TEvAllocateResult from# 8700000 to# 8800000 2024-11-21T23:12:04.000776Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2024-11-21T23:12:04.000790Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8800000 to# 8900000 2024-11-21T23:12:04.000812Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2024-11-21T23:12:04.000829Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T23:12:04.003497Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 HANDLE TEvAllocate Sender# [1:429:2463] requested range size#100000 2024-11-21T23:12:04.003753Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:431:2465] requested range size#100000 2024-11-21T23:12:04.004016Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2024-11-21T23:12:04.004109Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.004255Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.004343Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2024-11-21T23:12:04.004493Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.004587Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2024-11-21T23:12:04.004654Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.004762Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.004863Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2024-11-21T23:12:04.004940Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.005066Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2024-11-21T23:12:04.005206Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.005307Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2024-11-21T23:12:04.005398Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.005421Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.005520Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.005592Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2024-11-21T23:12:04.005711Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.005782Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.005897Z node 1 :TX_ALLOCATOR 
DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2024-11-21T23:12:04.005918Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:429:2463] TEvAllocateResult from# 9000000 to# 9100000 2024-11-21T23:12:04.005954Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.006032Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2024-11-21T23:12:04.006114Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.006191Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2024-11-21T23:12:04.006208Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:431:2465] TEvAllocateResult from# 9100000 to# 9200000 2024-11-21T23:12:04.006237Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.006322Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2024-11-21T23:12:04.006337Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9200000 to# 9300000 2024-11-21T23:12:04.006425Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2024-11-21T23:12:04.006463Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9300000 to# 9400000 2024-11-21T23:12:04.006521Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.006716Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2024-11-21T23:12:04.006743Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9400000 to# 9500000 2024-11-21T23:12:04.006790Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.006889Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2024-11-21T23:12:04.006913Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9500000 to# 9600000 2024-11-21T23:12:04.006958Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.007086Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.007198Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2024-11-21T23:12:04.007234Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# 
[1:441:2475] TEvAllocateResult from# 9600000 to# 9700000 2024-11-21T23:12:04.007335Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2024-11-21T23:12:04.007356Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9700000 to# 9800000 2024-11-21T23:12:04.007426Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.007535Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2024-11-21T23:12:04.007561Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9800000 to# 9900000 2024-11-21T23:12:04.007661Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2024-11-21T23:12:04.007685Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::WriteHuge >> TKeyValueTracingTest::WriteSmall >> TKeyValueTracingTest::ReadHuge |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2024-11-21T23:12:04.110650Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:12:04.110985Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:12:04.111481Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:12:04.112609Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.112967Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:12:04.120907Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.121020Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.121061Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.121117Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:12:04.121244Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.121331Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:12:04.121430Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:12:04.121873Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#0 2024-11-21T23:12:04.122243Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.122277Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.122327Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2024-11-21T23:12:04.122354Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 0 to# 0 expected SUCCESS >> KqpSystemView::FailResolve ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2024-11-21T23:12:04.011850Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2024-11-21T23:12:04.012270Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2024-11-21T23:12:04.012830Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:12:04.014077Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.014422Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:12:04.021771Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.021843Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.021880Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.021934Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2024-11-21T23:12:04.022058Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.022149Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:12:04.022250Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:12:04.023344Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2024-11-21T23:12:04.023856Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2024-11-21T23:12:04.024117Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2024-11-21T23:12:04.024465Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2024-11-21T23:12:04.024645Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:68:2103] requested range size#100000 2024-11-21T23:12:04.024860Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.024965Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2024-11-21T23:12:04.025112Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.025257Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.025347Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.025437Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2024-11-21T23:12:04.025611Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.025668Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] 
requested range size#100000 2024-11-21T23:12:04.025782Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.025861Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2024-11-21T23:12:04.025958Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.026032Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.026124Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.026171Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2024-11-21T23:12:04.026304Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.026375Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2024-11-21T23:12:04.026422Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2024-11-21T23:12:04.026537Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.026613Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2024-11-21T23:12:04.026646Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2024-11-21T23:12:04.026761Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2024-11-21T23:12:04.026786Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2024-11-21T23:12:04.026883Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.026934Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.026966Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2024-11-21T23:12:04.026991Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2024-11-21T23:12:04.027071Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.027116Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2024-11-21T23:12:04.027131Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:68:2103] TEvAllocateResult from# 400000 to# 500000 2024-11-21T23:12:04.027192Z node 1 
:TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.027241Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.027294Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2024-11-21T23:12:04.027323Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 500000 to# 600000 2024-11-21T23:12:04.027393Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2024-11-21T23:12:04.027414Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2024-11-21T23:12:04.027479Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.027510Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.027556Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2024-11-21T23:12:04.027574Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2024-11-21T23:12:04.027660Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.027680Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2024-11-21T23:12:04.027696Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2024-11-21T23:12:04.027774Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.027817Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2024-11-21T23:12:04.027840Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T23:12:04.031670Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2024-11-21T23:12:04.041609Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2024-11-21T23:12:04.042334Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2024-11-21T23:12:04.042438Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: 
[[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2024-11-21T23:12:04.042600Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2024-11-21T23:12:04.042646Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2024-11-21T23:12:04.042678Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2024-11-21T23:12:04.042714Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... IN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.340169Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2024-11-21T23:12:04.340198Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:615:2546] TEvAllocateResult from# 9100000 to# 9200000 2024-11-21T23:12:04.340257Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2024-11-21T23:12:04.340291Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:617:2548] TEvAllocateResult from# 9200000 to# 9300000 2024-11-21T23:12:04.340397Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.340481Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2024-11-21T23:12:04.340510Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:619:2550] TEvAllocateResult from# 9300000 to# 9400000 2024-11-21T23:12:04.340595Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.340711Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2024-11-21T23:12:04.340743Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:621:2552] TEvAllocateResult from# 9400000 to# 9500000 2024-11-21T23:12:04.340868Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.340921Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2024-11-21T23:12:04.340946Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:623:2554] TEvAllocateResult from# 9500000 to# 9600000 2024-11-21T23:12:04.341057Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.341164Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve 
Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2024-11-21T23:12:04.341188Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:625:2556] TEvAllocateResult from# 9600000 to# 9700000 2024-11-21T23:12:04.341313Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.341402Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.341492Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2024-11-21T23:12:04.341514Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:627:2558] TEvAllocateResult from# 9700000 to# 9800000 2024-11-21T23:12:04.341617Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.341686Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.341750Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2024-11-21T23:12:04.341783Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:629:2560] TEvAllocateResult from# 9800000 to# 9900000 2024-11-21T23:12:04.341886Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.341991Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.342048Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2024-11-21T23:12:04.342071Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:631:2562] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2024-11-21T23:12:04.346338Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2024-11-21T23:12:04.347862Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2024-11-21T23:12:04.348574Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2024-11-21T23:12:04.348624Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2024-11-21T23:12:04.348769Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2024-11-21T23:12:04.348815Z node 1 :TABLET_MAIN DEBUG: 
TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2024-11-21T23:12:04.348862Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2024-11-21T23:12:04.348893Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2024-11-21T23:12:04.348968Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2024-11-21T23:12:04.349035Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2024-11-21T23:12:04.349072Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2024-11-21T23:12:04.349107Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2024-11-21T23:12:04.349139Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2024-11-21T23:12:04.349190Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2024-11-21T23:12:04.349341Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2024-11-21T23:12:04.349396Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2024-11-21T23:12:04.349428Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2024-11-21T23:12:04.349449Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2024-11-21T23:12:04.349471Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2024-11-21T23:12:04.349504Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:5:1:24576:78:0],] 2024-11-21T23:12:04.349539Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2024-11-21T23:12:04.349576Z node 1 
:TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2024-11-21T23:12:04.349603Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2024-11-21T23:12:04.349626Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2024-11-21T23:12:04.349654Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2024-11-21T23:12:04.349678Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2024-11-21T23:12:04.349931Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2024-11-21T23:12:04.351272Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.354653Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2024-11-21T23:12:04.354883Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2024-11-21T23:12:04.355604Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2024-11-21T23:12:04.355661Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.355797Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2024-11-21T23:12:04.355885Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 >> KqpSysColV1::StreamInnerJoinTables |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk |83.1%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSysColV0::SelectRowAsterisk >> KqpSystemView::NodesRange2 |83.1%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect >> KqpSysColV1::InnerJoinTables >> TxUsage::WriteToTopic_Demo_23_RestartNo [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple >> KqpSysColV0::SelectRange >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit >> KqpSystemView::QueryStatsScan >> KqpSysColV1::StreamSelectRowById >> KqpSysColV1::StreamSelectRowAsterisk >> TKeyValueTracingTest::WriteSmall [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] >> TKeyValueTracingTest::ReadSmall [FAIL] >> TKeyValueTracingTest::ReadHuge [FAIL] >> TxUsage::WriteToTopic_Demo_9 [GOOD] >> BasicUsage::ReadWithRestarts [GOOD] >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight >> TxUsage::WriteToTopic_Demo_13 [GOOD] >> KqpSystemView::PartitionStatsRanges >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange >> TxUsage::WriteToTopic_Demo_14 >> KqpSystemView::Join >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionDeadNode >> TxUsage::WriteToTopic_Demo_42 >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> TKeyValueTest::TestRenameToLongKey [GOOD] >> LocalPartition::Basic [GOOD] >> LocalPartition::DescribeBadPartition >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartNo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:217:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! 
new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:149:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:151:2173] Leader for TabletID 72057594037927937 is [7:154:2174] sender: [7:155:2057] recipient: [7:151:2173] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:154:2174] Leader for TabletID 72057594037927937 is [7:154:2174] sender: [7:224:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:149:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:152:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:151:2173] Leader for TabletID 72057594037927937 is [8:154:2174] sender: [8:155:2057] recipient: [8:151:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! 
new actor is[8:154:2174] Leader for TabletID 72057594037927937 is [8:154:2174] sender: [8:224:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:151:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:155:2057] recipient: [9:153:2175] Leader for TabletID 72057594037927937 is [9:156:2176] sender: [9:157:2057] recipient: [9:153:2175] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:156:2176] Leader for TabletID 72057594037927937 is [9:156:2176] sender: [9:226:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2175] Leader for TabletID 72057594037927937 is [10:156:2176] sender: [10:157:2057] recipient: [10:153:2175] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2176] Leader for TabletID 72057594037927937 is [10:156:2176] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2177] Leader for TabletID 72057594037927937 is [11:158:2178] sender: [11:159:2057] recipient: [11:155:2177] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! 
new actor is[11:158:2178] Leader for TabletID 72057594037927937 is [11:158:2178] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipie ... ] sender: [18:169:2057] recipient: [18:165:2185] !Reboot 72057594037927937 (actor [18:105:2137]) rebooted! !Reboot 72057594037927937 (actor [18:105:2137]) tablet resolver refreshed! new actor is[18:168:2186] Leader for TabletID 72057594037927937 is [18:168:2186] sender: [18:238:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:101:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:106:2057] recipient: [19:99:2133] Leader for TabletID 72057594037927937 is [19:105:2137] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:101:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:106:2057] recipient: [20:99:2133] Leader for TabletID 72057594037927937 is [20:105:2137] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:101:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:106:2057] recipient: [21:99:2133] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:139:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:141:2057] recipient: [21:97:2132] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:144:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:105:2137] sender: [21:145:2057] recipient: [21:143:2166] Leader for TabletID 72057594037927937 is [21:146:2167] sender: [21:147:2057] recipient: [21:143:2166] !Reboot 72057594037927937 (actor [21:105:2137]) rebooted! !Reboot 72057594037927937 (actor [21:105:2137]) tablet resolver refreshed! new actor is[21:146:2167] Leader for TabletID 72057594037927937 is [21:146:2167] sender: [21:216:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:101:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:106:2057] recipient: [22:99:2133] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:139:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:105:2137]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:141:2057] recipient: [22:97:2132] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:144:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:105:2137] sender: [22:145:2057] recipient: [22:143:2166] Leader for TabletID 72057594037927937 is [22:146:2167] sender: [22:147:2057] recipient: [22:143:2166] !Reboot 72057594037927937 (actor [22:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [22:105:2137]) tablet resolver refreshed! new actor is[22:146:2167] Leader for TabletID 72057594037927937 is [22:146:2167] sender: [22:216:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:142:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:144:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:146:2057] recipient: [23:145:2166] Leader for TabletID 72057594037927937 is [23:147:2167] sender: [23:148:2057] recipient: [23:145:2166] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:147:2167] Leader for TabletID 72057594037927937 is [23:147:2167] sender: [23:217:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:147:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:149:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:151:2057] recipient: [24:150:2171] Leader for TabletID 72057594037927937 is [24:152:2172] sender: [24:153:2057] recipient: [24:150:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:152:2172] Leader for TabletID 72057594037927937 is [24:152:2172] sender: [24:222:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:147:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:151:2057] recipient: [25:149:2171] Leader for TabletID 72057594037927937 is [25:152:2172] sender: [25:153:2057] recipient: [25:149:2171] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:152:2172] Leader for TabletID 72057594037927937 is [25:152:2172] sender: [25:222:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:148:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:151:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:152:2057] recipient: [26:150:2171] Leader for TabletID 72057594037927937 is [26:153:2172] sender: [26:154:2057] recipient: [26:150:2171] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:153:2172] Leader for TabletID 72057594037927937 is [26:153:2172] sender: [26:223:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:153:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:156:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:157:2057] recipient: [27:155:2176] Leader for TabletID 72057594037927937 is [27:158:2177] sender: [27:159:2057] recipient: [27:155:2176] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:158:2177] Leader for TabletID 72057594037927937 is [27:158:2177] sender: [27:228:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:156:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:155:2176] Leader for TabletID 72057594037927937 is [28:158:2177] sender: [28:159:2057] recipient: [28:155:2176] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:158:2177] Leader for TabletID 72057594037927937 is [28:158:2177] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:154:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:157:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:158:2057] recipient: [29:156:2176] Leader for TabletID 72057594037927937 is [29:159:2177] sender: [29:160:2057] recipient: [29:156:2176] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:159:2177] Leader for TabletID 72057594037927937 is [29:159:2177] sender: [29:229:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] >> KqpSysColV1::SelectRowById >> TxUsage::WriteToTopic_Demo_30 [GOOD] >> KqpSystemView::FailResolve [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:00.997277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:00.997357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:00.997404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:00.997443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:00.997490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:00.997516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:00.997563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2024-11-21T23:12:00.997828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:01.073257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:01.073307Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:01.092919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:01.097295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:01.097477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:01.103842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:01.104473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:01.105083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.105336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:01.109586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.110853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.110917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.111110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:01.111160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:01.111215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:01.111322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.117125Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:01.216237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:01.216430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.216563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:01.216731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:01.216782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.218565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2024-11-21T23:12:01.218651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:01.218785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.218864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:01.218892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:01.218920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:01.220322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.220356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:01.220379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:01.221530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.221563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.221595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.221623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.224454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:01.225737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:01.225862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:01.226505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:01.226666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:01.226721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.226902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:01.226934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:01.227088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:01.227154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:01.228536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:01.228574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:01.228683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:01.228710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:01.228953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:01.228995Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:01.229084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:01.229114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.229172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:01.229228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:01.229262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:01.229293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:01.229348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:01.229404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:01.229433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:01.230883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:01.230969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:01.231001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:01.231043Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:01.231070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:01.231139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
MESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:50 tabletId 72075186233409595 2024-11-21T23:12:09.441162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:12:09.441215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:12:09.441291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:12:09.441470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:52 2024-11-21T23:12:09.441497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:52 tabletId 72075186233409597 2024-11-21T23:12:09.441629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:46 2024-11-21T23:12:09.441656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:46 tabletId 72075186233409591 2024-11-21T23:12:09.441736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:48 2024-11-21T23:12:09.441784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:48 tabletId 72075186233409593 2024-11-21T23:12:09.441858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2024-11-21T23:12:09.441892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2024-11-21T23:12:09.441983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:44 2024-11-21T23:12:09.442027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:44 tabletId 72075186233409589 2024-11-21T23:12:09.442095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:65 2024-11-21T23:12:09.442116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:65 tabletId 72075186233409610 2024-11-21T23:12:09.442186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:63 2024-11-21T23:12:09.442210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:63 tabletId 72075186233409608 2024-11-21T23:12:09.442307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:61 2024-11-21T23:12:09.442333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:61 tabletId 72075186233409606 2024-11-21T23:12:09.446887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:59 2024-11-21T23:12:09.446929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2024-11-21T23:12:09.447780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T23:12:09.447813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2024-11-21T23:12:09.448312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T23:12:09.448343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T23:12:09.449384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T23:12:09.449416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T23:12:09.449536Z node 
2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2024-11-21T23:12:09.449570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2024-11-21T23:12:09.449663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2024-11-21T23:12:09.449684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2024-11-21T23:12:09.449738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2024-11-21T23:12:09.449760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2024-11-21T23:12:09.449885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2024-11-21T23:12:09.449914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2024-11-21T23:12:09.451180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2024-11-21T23:12:09.451216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2024-11-21T23:12:09.451297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2024-11-21T23:12:09.451322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2024-11-21T23:12:09.451418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2024-11-21T23:12:09.451442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2024-11-21T23:12:09.451508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2024-11-21T23:12:09.451531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2024-11-21T23:12:09.451587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2024-11-21T23:12:09.451620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2024-11-21T23:12:09.451683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2024-11-21T23:12:09.451722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2024-11-21T23:12:09.451826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:40 2024-11-21T23:12:09.451861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2024-11-21T23:12:09.451944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2024-11-21T23:12:09.451967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2024-11-21T23:12:09.459366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2024-11-21T23:12:09.459432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2024-11-21T23:12:09.459545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2024-11-21T23:12:09.459568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2024-11-21T23:12:09.459640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2024-11-21T23:12:09.459663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:30 tabletId 72075186233409575 2024-11-21T23:12:09.459834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2024-11-21T23:12:09.459871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2024-11-21T23:12:09.459979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T23:12:09.460003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2024-11-21T23:12:09.460083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:57 2024-11-21T23:12:09.460107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2024-11-21T23:12:09.460216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:53 2024-11-21T23:12:09.460248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2024-11-21T23:12:09.460315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:55 2024-11-21T23:12:09.460337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2024-11-21T23:12:09.460399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:49 2024-11-21T23:12:09.460427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2024-11-21T23:12:09.460511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:47 2024-11-21T23:12:09.460541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2024-11-21T23:12:09.464836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:51 2024-11-21T23:12:09.464892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2024-11-21T23:12:09.464987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:45 2024-11-21T23:12:09.465010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2024-11-21T23:12:09.465064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:43 2024-11-21T23:12:09.465103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2024-11-21T23:12:09.465247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:41 2024-11-21T23:12:09.465299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2024-11-21T23:12:09.465464Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 2024-11-21T23:12:09.466465Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:12:09.466714Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 324us result status StatusPathDoesNotExist 2024-11-21T23:12:09.466878Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:12:09.467672Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2024-11-21T23:12:09.467780Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 112us result status StatusPathDoesNotExist 2024-11-21T23:12:09.467859Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSysColV1::StreamInnerJoinTables [GOOD] >> KqpSysColV1::SelectRowAsterisk [GOOD] >> KqpSysColV0::SelectRowAsterisk [GOOD] >> TxUsage::WriteToTopic_Demo_31 >> KqpSysColV1::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 5795, MsgBus: 6992 2024-11-21T23:12:05.285875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874315656319034:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:05.285940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0022be/r3tmp/tmpVFOLQQ/pdisk_1.dat 2024-11-21T23:12:05.638981Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5795, node 1 2024-11-21T23:12:05.769568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:05.769680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:05.782604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:05.793789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:05.793812Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:05.793822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:05.793893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6992 TClient is connected to server localhost:6992 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:06.290768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:06.315799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:06.451514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:12:06.592332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:06.659520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.388526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874328541222616:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:08.388632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:08.651759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.688590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.723485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.792814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.835089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.867100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.915449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874328541223114:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:08.915541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:08.915810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874328541223119:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:08.918918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:08.931448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874328541223121:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:10.286681Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874315656319034:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:10.286743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:10.430619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.637170Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439874337131158098:3677], for# user0@builtin, access# SelectRow 2024-11-21T23:12:10.637460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2024-11-21T23:12:10.656405Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTk0MmY5YmEtZTZlOWI4YmItNWUwMWRiZjYtZTBlMGRiNmE=, ActorId: [1:7439874337131158066:2465], ActorState: ExecuteState, TraceId: 01jd8fymrnd5wbb8nxgh1bvps8, Create QueryResponse for error on request, msg: 2024-11-21T23:12:10.656723Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230730630, txId: 281474976710672] shutting down 2024-11-21T23:12:10.657793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jd8fymrnd5wbb8nxgh1bvps8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk0MmY5YmEtZTZlOWI4YmItNWUwMWRiZjYtZTBlMGRiNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TDataShardTrace::TestTraceDistributedSelect >> KqpSysColV1::StreamInnerJoinSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 24212, MsgBus: 16096 2024-11-21T23:12:05.630692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874315701290559:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:05.630781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002282/r3tmp/tmprHX2QR/pdisk_1.dat 2024-11-21T23:12:06.048974Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24212, node 1 2024-11-21T23:12:06.110907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.111042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.112699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.124548Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:06.124571Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:06.124591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:06.124677Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16096 TClient is connected to server localhost:16096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:06.637485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:06.657169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:06.796863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:06.950318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.018073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.038765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874328586194156:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.038940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.077215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.332323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.406783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.436036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.467846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.546513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.592217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874332881161956:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.592302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.592565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874332881161961:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.595711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:09.606829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874332881161963:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:10.631153Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874315701290559:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:10.631226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 8372, MsgBus: 64702 2024-11-21T23:12:05.385708Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874314298304109:2202];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:05.385933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0022ac/r3tmp/tmpe117pL/pdisk_1.dat 2024-11-21T23:12:05.802972Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:05.810846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:05.810953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:05.813136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8372, node 1 2024-11-21T23:12:05.893014Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:05.893041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:05.893052Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:05.893149Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64702 TClient is connected to server localhost:64702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:06.405740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:06.432478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:06.555454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:06.728286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:06.792265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.671705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874327183207529:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:08.671859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:08.888912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.921813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:08.949638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.025957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.064657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.103950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.325631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874331478175328:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.325689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.325849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874331478175333:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.328707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:09.338841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874331478175335:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:10.387376Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874314298304109:2202];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:10.387453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:10.952290Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230730951, txId: 281474976710671] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] >> KqpSysColV1::InnerJoinTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 25515, MsgBus: 16621 2024-11-21T23:12:05.784929Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874313158726417:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:05.784995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002286/r3tmp/tmpcYg4VB/pdisk_1.dat 2024-11-21T23:12:06.123693Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25515, node 1 2024-11-21T23:12:06.190513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.190649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.193727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.199001Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:06.199022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:06.199029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:06.199113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16621 TClient is connected to server localhost:16621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:06.764141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:06.792913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:06.921366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.098698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.171236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.302750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874326043630013:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.313793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.344639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.376291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.446888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.522756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.559185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.629214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.695692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874330338597813:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.695778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.695894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874330338597818:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.698823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:09.709129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874330338597820:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:10.786596Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874313158726417:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:10.786661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV0::SelectRange [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> KqpSysColV1::StreamSelectRowById [GOOD] >> KqpSysColV0::UpdateAndDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 61009, MsgBus: 23719 2024-11-21T23:12:06.076589Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874320156068983:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:06.076654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002268/r3tmp/tmpeTIqSO/pdisk_1.dat 2024-11-21T23:12:06.387554Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61009, node 1 2024-11-21T23:12:06.400082Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:06.409590Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:06.427458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.427565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.428959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.437043Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:06.437072Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:06.437088Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:06.437196Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23719 TClient is connected to server localhost:23719 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:06.988155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.011144Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:07.025830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.159728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.328504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.418038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.527137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874333040972594:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.539741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.574148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.604240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.630120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.659310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.697702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.764102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:09.871876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874333040973097:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.871949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.872165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874333040973102:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:09.883139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:10.046586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874333040973104:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:11.080040Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874320156068983:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:11.080127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! 
new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:217:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:148:2057] recipient: [4:147:2168] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:150:2057] recipient: [4:147:2168] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:149:2169] Leader for TabletID 72057594037927937 is [4:149:2169] sender: [4:219:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:149:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:153:2057] recipient: [5:151:2173] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:155:2057] recipient: [5:151:2173] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:154:2174] Leader for TabletID 72057594037927937 is [5:154:2174] sender: [5:224:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:149:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:153:2057] recipient: [6:151:2173] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:155:2057] recipient: [6:151:2173] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! 
new actor is[6:154:2174] Leader for TabletID 72057594037927937 is [6:154:2174] sender: [6:224:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:156:2057] recipient: [7:155:2175] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:158:2057] recipient: [7:155:2175] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:157:2176] Leader for TabletID 72057594037927937 is [7:157:2176] sender: [7:205:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:156:2177] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:160:2057] recipient: [8:156:2177] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:159:2178] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:229:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:156:2177] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:160:2057] recipient: [9:156:2177] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! 
new actor is[9:159:2178] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:229:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:160:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:161:2057] recipient: [10:159:2179] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:163:2057] recipient: [10:159:2179] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:162:2180] Leader for TabletID 72057594037927937 is [10:162:2180] sender: [10:210:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:159:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:163:2057] recipient: [11:161:2181] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:165:2057] recipient: [11:161:2181] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:164:2182] Leader for TabletID 72057594037927937 is [11:164:2182] sender: [11:234:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader f ... 37927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:150:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:153:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:154:2057] recipient: [24:152:2173] Leader for TabletID 72057594037927937 is [24:155:2174] sender: [24:156:2057] recipient: [24:152:2173] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:155:2174] Leader for TabletID 72057594037927937 is [24:155:2174] sender: [24:225:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:155:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:158:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:159:2057] recipient: [25:157:2178] Leader for TabletID 72057594037927937 is [25:160:2179] sender: [25:161:2057] recipient: [25:157:2178] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! new actor is[25:160:2179] Leader for TabletID 72057594037927937 is [25:160:2179] sender: [25:230:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:155:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:158:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:159:2057] recipient: [26:157:2178] Leader for TabletID 72057594037927937 is [26:160:2179] sender: [26:161:2057] recipient: [26:157:2178] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:160:2179] Leader for TabletID 72057594037927937 is [26:160:2179] sender: [26:230:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:157:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:160:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:161:2057] recipient: [27:159:2179] Leader for TabletID 72057594037927937 is [27:162:2180] sender: [27:163:2057] recipient: [27:159:2179] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:162:2180] Leader for TabletID 72057594037927937 is [27:162:2180] sender: [27:232:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:162:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:165:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:166:2057] recipient: [28:164:2184] Leader for TabletID 72057594037927937 is [28:167:2185] sender: [28:168:2057] recipient: [28:164:2184] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! new actor is[28:167:2185] Leader for TabletID 72057594037927937 is [28:167:2185] sender: [28:237:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:162:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:165:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:166:2057] recipient: [29:164:2184] Leader for TabletID 72057594037927937 is [29:167:2185] sender: [29:168:2057] recipient: [29:164:2184] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:167:2185] Leader for TabletID 72057594037927937 is [29:167:2185] sender: [29:237:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:167:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:169:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:171:2057] recipient: [30:170:2188] Leader for TabletID 72057594037927937 is [30:172:2189] sender: [30:173:2057] recipient: [30:170:2188] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:172:2189] Leader for TabletID 72057594037927937 is [30:172:2189] sender: [30:242:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:169:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:172:2057] recipient: [31:171:2190] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:173:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:174:2191] sender: [31:175:2057] recipient: [31:171:2190] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! new actor is[31:174:2191] Leader for TabletID 72057594037927937 is [31:174:2191] sender: [31:244:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:169:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:172:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:173:2057] recipient: [32:171:2190] Leader for TabletID 72057594037927937 is [32:174:2191] sender: [32:175:2057] recipient: [32:171:2190] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:174:2191] Leader for TabletID 72057594037927937 is [32:174:2191] sender: [32:244:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:174:2057] recipient: [33:97:2132] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:176:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:178:2057] recipient: [33:177:2194] Leader for TabletID 72057594037927937 is [33:179:2195] sender: [33:180:2057] recipient: [33:177:2194] !Reboot 72057594037927937 (actor [33:105:2137]) rebooted! !Reboot 72057594037927937 (actor [33:105:2137]) tablet resolver refreshed! new actor is[33:179:2195] Leader for TabletID 72057594037927937 is [33:179:2195] sender: [33:249:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:101:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:106:2057] recipient: [34:99:2133] Leader for TabletID 72057594037927937 is [34:105:2137] sender: [34:139:2057] recipient: [34:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:144:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:143:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:143:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:152:2057] recipient: [7:150:2171] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:154:2057] recipient: [7:150:2171] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:153:2172] Leader for TabletID 72057594037927937 is [7:153:2172] sender: [7:223:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:150:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:152:2173] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:156:2057] recipient: [8:152:2173] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:155:2174] Leader for TabletID 72057594037927937 is [8:155:2174] sender: [8:225:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:150:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:152:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:153:2173] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:156:2057] recipient: [9:153:2173] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:155:2174] Leader for TabletID 72057594037927937 is [9:155:2174] sender: [9:225:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:151:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:153:2173] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:157:2057] recipient: [10:153:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:156:2174] Leader for TabletID 72057594037927937 is [10:156:2174] sender: [10:226:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:156:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:157:2057] recipient: [11:155:2175] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:159:2057] recipient: [11:155:2175] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:158:2176] Leader for TabletID 72057594037927937 is [11:158:2176] sender: [11:228:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... 
72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:101:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:106:2057] recipient: [23:99:2133] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:139:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:147:2057] recipient: [23:97:2132] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:150:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:105:2137] sender: [23:151:2057] recipient: [23:149:2171] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:153:2057] recipient: [23:149:2171] !Reboot 72057594037927937 (actor [23:105:2137]) rebooted! !Reboot 72057594037927937 (actor [23:105:2137]) tablet resolver refreshed! new actor is[23:152:2172] Leader for TabletID 72057594037927937 is [23:152:2172] sender: [23:222:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:101:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:106:2057] recipient: [24:99:2133] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:139:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:148:2057] recipient: [24:97:2132] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:150:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:105:2137] sender: [24:152:2057] recipient: [24:151:2171] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:154:2057] recipient: [24:151:2171] !Reboot 72057594037927937 (actor [24:105:2137]) rebooted! !Reboot 72057594037927937 (actor [24:105:2137]) tablet resolver refreshed! new actor is[24:153:2172] Leader for TabletID 72057594037927937 is [24:153:2172] sender: [24:201:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:101:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:106:2057] recipient: [25:99:2133] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:139:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:150:2057] recipient: [25:97:2132] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:153:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:105:2137] sender: [25:154:2057] recipient: [25:152:2173] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:156:2057] recipient: [25:152:2173] !Reboot 72057594037927937 (actor [25:105:2137]) rebooted! !Reboot 72057594037927937 (actor [25:105:2137]) tablet resolver refreshed! 
new actor is[25:155:2174] Leader for TabletID 72057594037927937 is [25:155:2174] sender: [25:225:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:101:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:106:2057] recipient: [26:99:2133] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:139:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:150:2057] recipient: [26:97:2132] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:153:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:105:2137] sender: [26:154:2057] recipient: [26:152:2173] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:156:2057] recipient: [26:152:2173] !Reboot 72057594037927937 (actor [26:105:2137]) rebooted! !Reboot 72057594037927937 (actor [26:105:2137]) tablet resolver refreshed! new actor is[26:155:2174] Leader for TabletID 72057594037927937 is [26:155:2174] sender: [26:225:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:101:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:106:2057] recipient: [27:99:2133] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:139:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:151:2057] recipient: [27:97:2132] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:154:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:105:2137] sender: [27:155:2057] recipient: [27:153:2173] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:157:2057] recipient: [27:153:2173] !Reboot 72057594037927937 (actor [27:105:2137]) rebooted! !Reboot 72057594037927937 (actor [27:105:2137]) tablet resolver refreshed! new actor is[27:156:2174] Leader for TabletID 72057594037927937 is [27:156:2174] sender: [27:204:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:101:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:106:2057] recipient: [28:99:2133] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:139:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:153:2057] recipient: [28:97:2132] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:156:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:105:2137] sender: [28:157:2057] recipient: [28:155:2175] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:159:2057] recipient: [28:155:2175] !Reboot 72057594037927937 (actor [28:105:2137]) rebooted! !Reboot 72057594037927937 (actor [28:105:2137]) tablet resolver refreshed! 
new actor is[28:158:2176] Leader for TabletID 72057594037927937 is [28:158:2176] sender: [28:228:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:101:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:106:2057] recipient: [29:99:2133] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:139:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:153:2057] recipient: [29:97:2132] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:156:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:105:2137] sender: [29:157:2057] recipient: [29:155:2175] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:159:2057] recipient: [29:155:2175] !Reboot 72057594037927937 (actor [29:105:2137]) rebooted! !Reboot 72057594037927937 (actor [29:105:2137]) tablet resolver refreshed! new actor is[29:158:2176] Leader for TabletID 72057594037927937 is [29:158:2176] sender: [29:228:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:101:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:106:2057] recipient: [30:99:2133] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:139:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:154:2057] recipient: [30:97:2132] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:156:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:105:2137] sender: [30:158:2057] recipient: [30:157:2175] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:160:2057] recipient: [30:157:2175] !Reboot 72057594037927937 (actor [30:105:2137]) rebooted! !Reboot 72057594037927937 (actor [30:105:2137]) tablet resolver refreshed! new actor is[30:159:2176] Leader for TabletID 72057594037927937 is [30:159:2176] sender: [30:229:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:101:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:106:2057] recipient: [31:99:2133] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:139:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:159:2057] recipient: [31:97:2132] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:161:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:105:2137] sender: [31:163:2057] recipient: [31:162:2180] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:165:2057] recipient: [31:162:2180] !Reboot 72057594037927937 (actor [31:105:2137]) rebooted! !Reboot 72057594037927937 (actor [31:105:2137]) tablet resolver refreshed! 
new actor is[31:164:2181] Leader for TabletID 72057594037927937 is [31:164:2181] sender: [31:234:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:101:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:106:2057] recipient: [32:99:2133] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:139:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:159:2057] recipient: [32:97:2132] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:162:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:105:2137] sender: [32:163:2057] recipient: [32:161:2180] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:165:2057] recipient: [32:161:2180] !Reboot 72057594037927937 (actor [32:105:2137]) rebooted! !Reboot 72057594037927937 (actor [32:105:2137]) tablet resolver refreshed! new actor is[32:164:2181] Leader for TabletID 72057594037927937 is [32:164:2181] sender: [32:234:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:101:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:106:2057] recipient: [33:99:2133] Leader for TabletID 72057594037927937 is [33:105:2137] sender: [33:139:2057] recipient: [33:14:2061] >> KqpSystemView::QueryStatsScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 4196, MsgBus: 14562 2024-11-21T23:12:07.021302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874322220033541:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:07.021347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002213/r3tmp/tmpf1mPvy/pdisk_1.dat 2024-11-21T23:12:07.443740Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:07.452336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:07.452429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:07.454762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4196, node 1 2024-11-21T23:12:07.520298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:07.520322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:07.520335Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:07.520415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14562 TClient is connected to server localhost:14562 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:08.067089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.091218Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:08.108567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.264554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.482444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.575272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.570804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874335104937117:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.571007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.780239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.814370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.867392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.907045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.934156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.006365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.061305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874339399904915:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.061381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.061562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874339399904920:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.064787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:11.071597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874339399904922:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:12.040101Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874322220033541:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:12.041493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 10148, MsgBus: 24085 2024-11-21T23:12:06.356037Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874319460696829:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:06.356109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002246/r3tmp/tmpMoimOV/pdisk_1.dat 2024-11-21T23:12:06.706681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10148, node 1 2024-11-21T23:12:06.772643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.772753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.775725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.797609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:06.797647Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:06.797659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:06.797763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24085 TClient is connected to server localhost:24085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:07.440049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:07.458264Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:07.492235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.680844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.874679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.950096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.319429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874336640567728:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.338034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.368764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.427431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.474037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.499435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.545597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.626359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.682086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874336640568228:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.682153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.682207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874336640568233:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.687597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:10.698505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874336640568235:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:11.356311Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874319460696829:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:11.356403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 11820, MsgBus: 11814 2024-11-21T23:12:06.685425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874319823385225:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:06.687050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00223e/r3tmp/tmpnNyBrU/pdisk_1.dat 2024-11-21T23:12:07.011854Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:07.046029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:07.046138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11820, node 1 2024-11-21T23:12:07.051829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:07.093466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:07.093491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:07.093506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:07.093588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11814 TClient is connected to server localhost:11814 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:07.663446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:07.691825Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:07.706925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:07.862790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.044791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.114027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.107959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874337003256106:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.108078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.308718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.345437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.378139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.415963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.455119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.553913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:10.666712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874337003256611:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.666839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.667183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874337003256616:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.671361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:10.684258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874337003256618:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:11.684381Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874319823385225:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:11.684455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:12.326781Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230732351, txId: 281474976710671] shutting down >> KqpSystemView::PartitionStatsRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 14976, MsgBus: 21761 2024-11-21T23:12:07.246962Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874322334354959:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:07.247010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002210/r3tmp/tmpei766V/pdisk_1.dat 2024-11-21T23:12:07.641124Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:07.670237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:07.670345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14976, node 1 2024-11-21T23:12:07.672148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:07.711034Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:07.711054Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:07.711064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:07.711173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21761 TClient is connected to server localhost:21761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:08.315064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.340001Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:08.347039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.490043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.658079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.743710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:11.131514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874335219258574:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.142327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.176949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.218376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.294355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.332236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.367381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.440008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.522175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874339514226376:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.522266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.522524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874339514226381:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.525822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:11.535395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874339514226383:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:12.252799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874322334354959:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:12.252866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:12.669351Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230732701, txId: 281474976710671] shutting down >> KqpSysColV1::InnerJoinSelectAsterisk >> KqpSysColV0::InnerJoinSelectAsterisk >> KqpSysColV1::SelectRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 7734, MsgBus: 10267 2024-11-21T23:12:07.422946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874325029082337:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:07.423017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002208/r3tmp/tmpxJSCqR/pdisk_1.dat 2024-11-21T23:12:07.999302Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:08.003466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:08.003590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:08.006268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7734, node 1 2024-11-21T23:12:08.113845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:08.113867Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:08.113873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:08.113957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10267 TClient is connected to server localhost:10267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:08.825957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.868306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.007466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.325096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.411288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:11.116328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874342208953023:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.116439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.368562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.403849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.439735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.467707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.495754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.557245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.597177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874342208953516:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.597266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.597460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874342208953521:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.601818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:11.612483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874342208953523:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:12.465460Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874325029082337:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:12.465757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:12.814671Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230732855, txId: 281474976710671] shutting down >> KqpSysColV0::SelectRowById >> KqpSystemView::Sessions >> KqpSystemView::ReadSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 24776, MsgBus: 16874 2024-11-21T23:12:07.098372Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874323927298200:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:07.098661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002216/r3tmp/tmpOaHYJl/pdisk_1.dat 2024-11-21T23:12:07.628869Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:07.650951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:07.651080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:07.652941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24776, node 1 2024-11-21T23:12:07.843011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:07.843033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:07.843039Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:07.843147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16874 TClient is connected to server localhost:16874 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:08.475054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.511332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:08.522193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.665012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.828603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.910163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.875254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874336812201787:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:10.875366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.136594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.169664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.197459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.236138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.266596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.349466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.401776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874341107169580:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.401873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.402041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874341107169585:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.404844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:11.414557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874341107169587:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:12.100250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874323927298200:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:12.100316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:13.280982Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230732785, txId: 281474976710671] shutting down 2024-11-21T23:12:13.433353Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230733418, txId: 281474976710674] shutting down >> KqpSysColV0::InnerJoinTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 27116, MsgBus: 4166 2024-11-21T23:12:07.988866Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874322754841901:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:07.988930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002204/r3tmp/tmpXwyOuW/pdisk_1.dat 2024-11-21T23:12:08.375725Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:08.405245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:08.406484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27116, node 1 2024-11-21T23:12:08.411030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:08.536257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:08.536281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:08.536289Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:08.536421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4166 TClient is connected to server localhost:4166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:09.124904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.370897Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:09.383489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.531647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.693517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.775869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:11.524633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874339934712789:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.524766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.764243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.788905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.815570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.841894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.863429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.895139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:11.968740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874339934713281:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.968836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.968941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874339934713286:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:11.972877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:11.995362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874339934713288:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:12.989042Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874322754841901:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:12.989133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:13.671465Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230733661, txId: 281474976710671] shutting down |83.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::PartitionStatsRange2 >> KqpSysColV1::UpdateAndDelete >> KqpSystemView::PartitionStatsRange3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 19524, MsgBus: 26336 2024-11-21T23:12:08.846179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874326367591533:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:08.847002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021fe/r3tmp/tmp8vuzEi/pdisk_1.dat 2024-11-21T23:12:09.386803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:09.386905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:09.388502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:09.441389Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19524, node 1 2024-11-21T23:12:09.550834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:09.550862Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:09.550871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:09.550965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26336 TClient is connected to server localhost:26336 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:10.285820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.300968Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:10.318861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.444365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.634605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.706926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:12.421990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874343547462282:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.422085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.654166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.688239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.718914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.752781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.786377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.851806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.899291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874343547462779:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.899365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.899368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874343547462784:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.905221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:12.917698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874343547462786:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:13.842518Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874326367591533:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:13.842584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::PartitionStatsSimple >> KqpSystemView::NodesSimple [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> YdbOlapStore::LogCountByResource [GOOD] >> KqpSystemView::NodesRange1 >> KqpSysColV1::StreamSelectRange >> KqpSysColV1::SelectRowById [GOOD] |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xFF66400) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4252 (0xF6F7F1C) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xF703E18) std::__y1::__function::__func, void ()>::operator()()+280 (0xF7157B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0xFF9BD39) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xFF6CF69) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xF714564) NUnitTest::TTestFactory::Execute()+2438 (0xFF6E836) NUnitTest::RunMain(int, char**)+5149 (0xFF9597D) ??+0 (0x7F34062B4D90) __libc_start_main+128 (0x7F34062B4E40) _start+41 (0xD7F7029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xFF66400) TestOneRead(TBasicString>, TBasicString>)+4826 (0xF6FD8BA) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xF70457E) std::__y1::__function::__func, void ()>::operator()()+280 (0xF7157B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0xFF9BD39) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xFF6CF69) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xF714564) NUnitTest::TTestFactory::Execute()+2438 (0xFF6E836) NUnitTest::RunMain(int, char**)+5149 (0xFF9597D) ??+0 (0x7F199B877D90) __libc_start_main+128 (0x7F199B877E40) _start+41 (0xD7F7029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xFF66400) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4252 (0xF6F7F1C) 
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xF703B08) std::__y1::__function::__func, void ()>::operator()()+280 (0xF7157B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0xFF9BD39) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xFF6CF69) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xF714564) NUnitTest::TTestFactory::Execute()+2438 (0xFF6E836) NUnitTest::RunMain(int, char**)+5149 (0xFF9597D) ??+0 (0x7FF3E8E72D90) __libc_start_main+128 (0x7FF3E8E72E40) _start+41 (0xD7F7029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:123, void TestOneRead(TString, TString): (env.WilsonUploader->BuildTraceTrees()) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0xFF66400) TestOneRead(TBasicString>, TBasicString>)+3708 (0xF6FD45C) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xF70418E) std::__y1::__function::__func, void ()>::operator()()+280 (0xF7157B8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0xFF9BD39) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0xFF6CF69) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xF714564) NUnitTest::TTestFactory::Execute()+2438 (0xFF6E836) NUnitTest::RunMain(int, char**)+5149 (0xFF9597D) ??+0 (0x7FFAB2357D90) __libc_start_main+128 (0x7FFAB2357E40) _start+41 (0xD7F7029) >> KqpSystemView::FailNavigate |83.2%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 8738, MsgBus: 23882 2024-11-21T23:12:06.984587Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874321480745604:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:06.984697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:07.020303Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874324956649369:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:07.024279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:07.034803Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439874321827895196:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:07.035124Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00222e/r3tmp/tmpHUddbx/pdisk_1.dat 2024-11-21T23:12:07.791645Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:07.828564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:07.828671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:07.831182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:07.831259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:07.831647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:07.831702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:07.864137Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:12:07.864173Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:12:07.864529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:07.865652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:07.865797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8738, node 1 2024-11-21T23:12:08.215357Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:08.215381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:08.215391Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:08.215488Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:23882 TClient is connected to server localhost:23882 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:09.360606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.450187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.716538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.268816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.399322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:11.984355Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874321480745604:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:11.984441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:12.019943Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874324956649369:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:12.020008Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:12.033221Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439874321827895196:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:12.033278Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:12.314662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874347250551352:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.315117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.349230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.449035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.526456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.578614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.633491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.753972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:12.897096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874347250552031:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.897166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.897786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874347250552038:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.903782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:12.937721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874347250552040:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:14.494108Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230734463, txId: 281474976710671] shutting down 2024-11-21T23:12:14.640065Z node 3 :BS_PROXY_PUT ERROR: [f6ed510e0e6d1908] Result# TEvPutResult {Id# [72075186224037896:1:15:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:12:14.656534Z node 2 :BS_PROXY_PUT ERROR: [1d0883c873990285] Result# TEvPutResult {Id# [72075186224037893:1:16:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> KqpSystemView::PartitionStatsRange1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 13885, MsgBus: 9607 2024-11-21T23:12:10.925658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874337481883436:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:10.927179Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021f8/r3tmp/tmpo9r2qq/pdisk_1.dat 2024-11-21T23:12:11.290296Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:11.293808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:11.293941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:11.298016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13885, node 1 2024-11-21T23:12:11.422251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:11.422274Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:11.422288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:11.422380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9607 TClient is connected to server localhost:9607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:11.979440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:12.003446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:12.175321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:12.309271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:12.373025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:14.443368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874354661754198:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.460537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.489314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.517381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.550729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.582690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.661278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.698231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.781358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874354661754696:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.781444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.782882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874354661754701:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.789229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:14.800105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874354661754703:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:15.912973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874337481883436:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:15.913053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> KqpSystemView::PartitionStatsParametricRanges >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> TxUsage::WriteToTopic_Demo_40 [GOOD] >> TxUsage::WriteToTopic_Demo_14 [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TxUsage::WriteToTopic_Demo_15 |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |83.3%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSystemView::QueryStatsSimple >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight [GOOD] |83.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 10412, MsgBus: 22277 2024-11-21T23:12:12.838981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874343220659957:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:12.839037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021f4/r3tmp/tmpovTQgK/pdisk_1.dat 2024-11-21T23:12:13.266962Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10412, node 1 2024-11-21T23:12:13.277830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:13.277982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:13.282414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:13.327943Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:13.327964Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:13.327972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:13.328054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22277 TClient is connected to server localhost:22277 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:13.951472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:13.990155Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:13.997883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:14.163600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:14.312001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:14.377363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.257197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874360400530847:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:16.264437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:16.300461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.370114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.402697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.432657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.476153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.548197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.621132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874360400531350:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:16.621212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:16.621295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874360400531355:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:16.624124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:16.634992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874360400531357:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:17.840293Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874343220659957:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:17.842071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:18.284247Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230738287, txId: 281474976710671] shutting down >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight |83.3%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |83.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> KqpSystemView::PartitionStatsFollower >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] |83.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> KqpSysColV0::UpdateAndDelete [GOOD] >> KqpSystemView::ReadSuccess [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> KqpSysColV0::SelectRowById [GOOD] >> TxUsage::WriteToTopic_Demo_41 >> KqpSystemView::PartitionStatsRange2 [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2024-11-21T23:12:15.937936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:15.941224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:15.941385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024d0/r3tmp/tmpwj9aMQ/pdisk_1.dat 2024-11-21T23:12:16.544444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.592051Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:16.648579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:16.648726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:16.664048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:16.812015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 >> TDataShardTrace::TestTraceDistributedUpsert [GOOD] |83.3%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |83.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> KqpSysColV0::InnerJoinTables [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] |83.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 23310, MsgBus: 11041 2024-11-21T23:12:14.774925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874353181710890:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:14.775228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021e7/r3tmp/tmptUNpAR/pdisk_1.dat 2024-11-21T23:12:15.136241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23310, node 1 2024-11-21T23:12:15.170787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:15.170933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:15.172731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:15.255381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:15.255415Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:15.255424Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:15.255528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11041 TClient is connected to server localhost:11041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:15.922040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:15.942344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:12:16.070637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.228869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.290749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:18.014366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874370361581644:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.014490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.239142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.300716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.331637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.364645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.395362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.435156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.472980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874370361582137:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.473092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.473253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874370361582142:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.476634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:18.497412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874370361582144:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:19.754601Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874353181710890:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:19.754924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::StreamSelectRange [GOOD] >> KqpSystemView::NodesRange2 [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> KqpSystemView::PartitionStatsSimple [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:141:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:143:2166] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:147:2057] recipient: [4:143:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! 
new actor is[4:146:2167] Leader for TabletID 72057594037927937 is [4:146:2167] sender: [4:216:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:141:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:144:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:145:2057] recipient: [5:143:2166] Leader for TabletID 72057594037927937 is [5:146:2167] sender: [5:147:2057] recipient: [5:143:2166] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:146:2167] Leader for TabletID 72057594037927937 is [5:146:2167] sender: [5:216:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:142:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:145:2057] recipient: [6:144:2166] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:146:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:147:2167] sender: [6:148:2057] recipient: [6:144:2166] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:147:2167] Leader for TabletID 72057594037927937 is [6:147:2167] sender: [6:217:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:144:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:147:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:148:2057] recipient: [7:146:2168] Leader for TabletID 72057594037927937 is [7:149:2169] sender: [7:150:2057] recipient: [7:146:2168] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! 
new actor is[7:149:2169] Leader for TabletID 72057594037927937 is [7:149:2169] sender: [7:219:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:144:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:147:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:148:2057] recipient: [8:146:2168] Leader for TabletID 72057594037927937 is [8:149:2169] sender: [8:150:2057] recipient: [8:146:2168] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:149:2169] Leader for TabletID 72057594037927937 is [8:149:2169] sender: [8:219:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:145:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:148:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:149:2057] recipient: [9:147:2168] Leader for TabletID 72057594037927937 is [9:150:2169] sender: [9:151:2057] recipient: [9:147:2168] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:150:2169] Leader for TabletID 72057594037927937 is [9:150:2169] sender: [9:220:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:150:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:153:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:154:2057] recipient: [10:152:2173] Leader for TabletID 72057594037927937 is [10:155:2174] sender: [10:156:2057] recipient: [10:152:2173] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! 
new actor is[10:155:2174] Leader for TabletID 72057594037927937 is [10:155:2174] sender: [10:225:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:150:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:153:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:154:2057] recipient: [11:152:2173] Leader for TabletID 72057594037927937 is [11:155:2174] sender: [11:156:2057] recipient: [11:152:2173] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:155:2174] Leader for TabletID 72057594037927937 is [11:155:2174] sender: [11:225:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:106:2057] recipient: [12:99:2133] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:139:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:151:2057] recipient: [12:97:2132] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:154:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:105:2137] sender: [12:155:2057] recipient: [12:153:2173] Leader for TabletID 72057594037927937 is [12:156:2174] sender: [12:157:2057] recipient: [12:153:2173] !Reboot 72057594037927937 (actor [12:105:2137]) rebooted! !Reboot 72057594037927937 (actor [12:105:2137]) tablet resolver refreshed! new actor is[12:156:2174] Leader for TabletID 72057594037927937 is [12:156:2174] sender: [12:226:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:101:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:106:2057] recipient: [13:99:2133] Leader for TabletID 72057594037927937 is [13:105:2137] sender: [13:139:2057] recipient: [13:14:2061] |83.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |83.3%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 62936, MsgBus: 1115 2024-11-21T23:12:14.777416Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874352365774098:2100];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:14.779478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021d4/r3tmp/tmpvo65YX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62936, node 1 2024-11-21T23:12:15.245277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:15.245424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:15.252193Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:15.259435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:15.261734Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:15.261752Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:15.402653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:15.402689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:15.402705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:15.402841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1115 TClient is connected to server localhost:1115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:16.050064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.081064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:12:16.208121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.366909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.441138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:18.177219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874369545644925:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.177355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.453099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.495824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.529679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.561564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.591914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.628081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.721083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874369545645424:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.721214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.721496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874369545645429:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.724713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:18.752511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874369545645431:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:19.784327Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874352365774098:2100];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:19.784385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 23447, MsgBus: 15135 2024-11-21T23:12:13.413945Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874351568283838:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:13.428454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021e8/r3tmp/tmpn9ccly/pdisk_1.dat 2024-11-21T23:12:13.923761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:13.923894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:13.929453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23447, node 1 2024-11-21T23:12:13.954920Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:14.082948Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:14.082971Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:14.082978Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:14.083095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15135 TClient is connected to server localhost:15135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:14.652702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:14.681880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:14.817969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:14.990982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:15.061647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.679516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874364453187433:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:16.679661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:16.959632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:17.007104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:17.043392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:17.090942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:17.120655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:17.161728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:17.200277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874368748155224:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:17.200360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:17.200525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874368748155229:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:17.203135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:17.212363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874368748155231:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:18.418353Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874351568283838:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:18.418512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 65398, MsgBus: 1376 2024-11-21T23:12:16.073075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874361065922276:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:16.073271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0020a6/r3tmp/tmpRtogbP/pdisk_1.dat 2024-11-21T23:12:16.426635Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65398, node 1 2024-11-21T23:12:16.466283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:16.466426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:16.507975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:16.535322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:16.535347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:16.535359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:16.535469Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1376 TClient is connected to server localhost:1376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:17.239705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:17.257451Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:17.282144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.423048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.587021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:17.647480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.311938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874373950825717:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.312076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.516425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.555536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.587223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.615004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.689356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.777810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.846747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874373950826216:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.846824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.847064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874373950826221:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.850184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:19.861659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874373950826223:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:21.074137Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874361065922276:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:21.074205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:21.137770Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230741131, txId: 281474976710671] shutting down |83.4%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> TxUsage::WriteToTopic_Demo_20_RestartNo [GOOD] >> TxUsage::WriteToTopic_Demo_31 [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit >> HttpRequest::Status >> KqpSystemView::FailNavigate [GOOD] >> KqpSysColV1::UpdateAndDelete [GOOD] >> BasicStatistics::NotFullStatisticsDatashard >> KqpSystemView::PartitionStatsRange1 [GOOD] |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |83.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 16861, MsgBus: 15599 2024-11-21T23:12:15.068347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874357475724633:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:15.068455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002140/r3tmp/tmpC6mf9H/pdisk_1.dat 2024-11-21T23:12:15.580185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:15.580358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:15.582701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16861, node 1 2024-11-21T23:12:15.631868Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:15.633456Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:15.671391Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:15.731076Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:15.731106Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:15.731122Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:15.731266Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15599 TClient is connected to server localhost:15599 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:16.270186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.298172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.440450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.640221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.729741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:18.411217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874370360628238:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.411312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.672184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.743648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.812882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.847936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.879901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.911111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.982122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874370360628735:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.982198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874370360628740:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.982236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.985683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:18.999253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874370360628742:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:19.977929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.074943Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874357475724633:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:20.075010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:20.154081Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jd8fyy1m7ea6bptk0k0cbap5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjgwMWNiZWUtM2FkNzMyMTMtMjhjZDMwZDMtOThiMjFjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:12:20.163819Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230740153, txId: 281474976710672] shutting down >> TxUsage::WriteToTopic_Demo_32 |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |83.4%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert [GOOD] Test command err: 2024-11-21T23:12:15.875936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:15.879264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:15.879412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024dc/r3tmp/tmpHi1y1c/pdisk_1.dat 2024-11-21T23:12:16.539777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.599357Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:16.656611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:16.656798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:16.668806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:16.810247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.025017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.025142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.025246Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.045680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:12:20.090580Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T23:12:20.315347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:12:21.632981Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fyy1p2ng9tgtpvfh0sa1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc4NmVjNDItNDNkM2E3NWUtNDIzNzc4MjMtZjAxMGE5NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TPQTestSlow::TestWriteVeryBigMessage >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> BasicStatistics::TwoServerlessDbs |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2024-11-21T23:12:15.822346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:15.827648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:15.827875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024cc/r3tmp/tmpDN6JjS/pdisk_1.dat 2024-11-21T23:12:16.550619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.607660Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:16.660078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:16.660220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:16.672062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:16.809866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.024640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.024774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.024893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.051339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:12:20.088941Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T23:12:20.308072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:12:21.632511Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fyy1nbb00bsy8mj94xgsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk4MWFjM2QtNDg4MjhhODQtMmIwNDlkYTUtYTU3NWYzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:12:21.799190Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fyzp8erhjz0rz3pypsmr1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmJhZGE2MjgtZDQ0YzIyMjAtYWY2N2VmM2YtZjRjOGZiZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:12:22.152524Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fyzsx7z6zjds0f2q8k34x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI0MzllNGMtYmQ0MTVmYS01MzZmZTg5MC0yY2M4OTAzYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 64567, MsgBus: 7252 2024-11-21T23:12:16.837385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874361088557862:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:16.837418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002071/r3tmp/tmp5IWOU3/pdisk_1.dat 2024-11-21T23:12:17.190008Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64567, node 1 2024-11-21T23:12:17.255807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:17.257371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:17.262777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:17.280818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:17.280840Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:17.280854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:17.280947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7252 TClient is connected to server localhost:7252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:17.797218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.817650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:17.833697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.976230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:18.140862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:18.218334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.898261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874373973461470:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.898359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.174554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.207385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.236397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.312714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.363572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.449258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.525590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874378268429265:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.525712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.530543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874378268429270:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.538343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:20.559198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874378268429272:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:21.838516Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874361088557862:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:21.838607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:21.925231Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230741955, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2024-11-21T23:12:15.822412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:15.827654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:15.827862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002514/r3tmp/tmpplKnBz/pdisk_1.dat 2024-11-21T23:12:16.540130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.594631Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:16.648594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:16.648802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:16.662953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:16.811446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.024643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:895:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.024788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:906:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.024907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.050716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:12:20.089218Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2024-11-21T23:12:20.309975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:909:2749], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:12:21.632776Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8fyy1nf2fjvfb2tw4q8nz3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQ3ZGFkYzUtYWJmZDY4OGMtMWZlNjYyMzUtOTk5YWZiNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:12:21.803288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fyznw45pq3me9g7m2yvah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGE0Y2M0ZTctYjM4NTI0MC04NmNkZjQ1YS1jYzRmYzBiNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:12:22.509035Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8fz00h449d4vevt44nnndk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFlMjc2MTQtZDYzZThjNzctYzFiMThlNC00ODI1NWIwMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 17360, MsgBus: 1853 2024-11-21T23:12:16.498766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874363854999514:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:16.499353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002093/r3tmp/tmpGDuRLj/pdisk_1.dat 2024-11-21T23:12:17.090954Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:17.097682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:17.097750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:17.100018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17360, node 1 2024-11-21T23:12:17.191427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:17.191468Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:17.191488Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:17.191588Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1853 TClient is connected to server localhost:1853 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:17.824073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.836738Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:17.851570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:18.012170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.194095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:18.288050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:20.231798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874381034870358:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.231974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.519134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.591652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.626973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.660726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.730296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.770590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.821643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874381034870856:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.821731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.822126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874381034870861:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.828105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:20.854552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874381034870863:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:21.500322Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874363854999514:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:21.500384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:22.196187Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230742186, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 7684, MsgBus: 30186 2024-11-21T23:12:06.189274Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874321272636457:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:06.189376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:06.235183Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874321130865426:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:06.245811Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439874321402379408:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:06.247582Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:06.264870Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439874320664992380:2193];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:06.497109Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:06.516500Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00227e/r3tmp/tmpWshDLd/pdisk_1.dat 2024-11-21T23:12:06.590035Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:06.855354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.855473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.855695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.855744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.858085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.858140Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.858293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.858354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.858692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:06.858738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:06.862069Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T23:12:06.862246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.863275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.865471Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T23:12:06.865529Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:12:06.865583Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:12:06.866445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.866722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.868414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:06.913165Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7684, node 1 2024-11-21T23:12:07.269128Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:07.287448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:07.287469Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:07.287477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:07.287591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30186 TClient is connected to server localhost:30186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:08.600384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:08.664059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:09.002981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.082250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.714760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:11.198920Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874321272636457:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:11.207207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:11.232097Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874321130865426:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:11.232229Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:11.245751Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439874321402379408:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:11.245815Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:11.261693Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439874320664992380:2193];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:11.261765Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:13.386145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874351337409479:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:13.386258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.001505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.205617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.381815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.452619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.545983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.641024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:14.741145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874355632377380:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.741239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.741379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874355632377385:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:14.745594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:14.772603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874355632377387:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:17.727940Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230737698, txId: 281474976710671] shutting down 2024-11-21T23:12:17.946066Z node 2 :BS_PROXY_PUT ERROR: [426484fa2bf5a82f] Result# TEvPutResult {Id# [72075186224037897:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:12:17.956925Z node 3 :BS_PROXY_PUT ERROR: [9bb523288124460e] Result# TEvPutResult {Id# [72075186224037899:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:12:17.963682Z node 5 :BS_PROXY_PUT ERROR: [e6e7a5f3ccca7abe] Result# TEvPutResult {Id# [72075186224037893:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:12:18.003051Z node 4 :BS_PROXY_PUT ERROR: [0d5456486a834c3e] Result# TEvPutResult {Id# [72075186224037891:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> KqpSystemView::Join [GOOD] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TxUsage::WriteToTopic_Demo_42 [GOOD] >> KqpSystemView::Sessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2024-11-21T23:07:39.999890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873172332862921:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:39.999937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dc2/r3tmp/tmpcJ9X5N/pdisk_1.dat 2024-11-21T23:07:40.767581Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:40.785692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:40.785792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:40.790170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16382, node 1 2024-11-21T23:07:41.111747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:41.111764Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:41.111769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:41.111852Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13279 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:41.605731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.629027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:41.629086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:41.637456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:41.637650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:41.637662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:41.639912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:41.639937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:41.641573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:41.642626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:41.645359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230461689, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:41.645430Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:41.645678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:41.654418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:41.654612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:41.654660Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:41.654752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2024-11-21T23:07:41.654787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:41.654828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:41.663394Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:41.663462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:41.663479Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:41.663600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 self_check_result: GOOD issue_log { id: "YELLOW-9a33-f489" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-9a33-1ba8" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-9a33-1ba8" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-9a33-e9e2-1" reason: "YELLOW-9a33-e9e2-2" reason: "YELLOW-9a33-e9e2-3" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-9a33-e9e2-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-9a33-e9e2-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 1 host: "::1" port: 12001 } 2024-11-21T23:07:46.373305Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439873203119556636:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:46.445799Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:46.486235Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439873204615526700:2148];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:46.498383Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439873201432902171:2104];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dc2/r3tmp/tmpRjpczE/pdisk_1.dat 2024-11-21T23:07:46.774797Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:07:46.811037Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2024-11-21T23:07:47.115637Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:47.141215Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:47.141310Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:47.142379Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:47.142452Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:47.143214Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:47.143257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:47.152077Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T23:07:47.152256Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:47.152419Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2024-11-21T23:07:47.155415Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:47.156533Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6053, node 4 2024-11-21T23:07:47.446326Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:47.446350Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:47.446363Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:47.446510Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateSt ... 
8684921542:3640], CA [47:7439874348684921528:3632], CA [47:7439874348684921547:3643], CA [47:7439874348684921564:3654], CA [47:7439874348684921533:3635], CA [47:7439874348684921544:3641], CA [47:7439874348684921579:3667], CA [47:7439874348684921574:3662], CA [47:7439874348684921530:3633], CA [47:7439874348684921569:3657], CA [47:7439874348684921577:3665], CA [47:7439874348684921549:3644], CA [47:7439874348684921572:3660], CA [47:7439874348684921566:3655], CA [47:7439874348684921501:3610], CA [47:7439874348684921507:3615], CA [47:7439874348684921554:3647], CA [47:7439874348684921570:3658], CA [47:7439874348684921510:3618], CA [47:7439874348684921505:3613], CA [47:7439874348684921583:3671], CA [47:7439874348684921558:3650], CA [47:7439874348684921536:3637], CA [47:7439874348684921587:3674], CA [47:7439874348684921581:3669], CA [47:7439874348684921555:3648], CA [47:7439874348684921512:3619], CA [47:7439874348684921560:3651], CA [47:7439874348684921515:3622], CA [47:7439874348684921521:3627], CA [47:7439874348684921525:3630], CA [47:7439874348684921519:3625], CA [47:7439874348684921513:3620], 2024-11-21T23:12:13.891804Z node 47 :BLOB_CACHE DEBUG: Miss cache: { Blob: DS:2181038080:[72075186224037889:1:93:31:0:7648:0] Offset: 0 Size: 7648 } sender:[47:7439874348684921823:3764] 2024-11-21T23:12:13.891818Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439874348684921584:3672], TxId: 281474976715773, task: 63. Ctx: { TraceId : 01jd8fyprz6s47eacjg044efss. SessionId : ydb://session/3?node_id=47&id=OWZmZTYwOTMtNjJmYTYwNTYtYzMwNjk3M2EtYjU0NjQyMjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. wakeup with tag 2 2024-11-21T23:12:13.891833Z node 47 :BLOB_CACHE DEBUG: Enqueue read range: { Blob: DS:2181038080:[72075186224037889:1:93:31:0:7648:0] Offset: 0 Size: 7648 } 2024-11-21T23:12:13.891873Z node 47 :BLOB_CACHE DEBUG: Sending read from BlobCache: group: 2181038080 ranges: { Blob: DS:2181038080:[72075186224037889:1:93:31:0:7648:0] Offset: 0 Size: 7648 } cookie: 232 2024-11-21T23:12:13.891875Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439874348684921494:3599] TxId: 281474976715773. Ctx: { TraceId: 01jd8fyprz6s47eacjg044efss, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OWZmZTYwOTMtNjJmYTYwNTYtYzMwNjk3M2EtYjU0NjQyMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [47:7439874348684921571:3659], task: 50, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 853 Tasks { TaskId: 50 CpuTimeUs: 334 ComputeCpuTimeUs: 5 BuildCpuTimeUs: 329 HostName: "ghrun-uc25mmfz34" NodeId: 47 StartTimeMs: 1732230733383 } MaxMemoryUsage: 1048576 } 2024-11-21T23:12:13.891917Z node 47 :BLOB_CACHE DEBUG: Batch read request: { Blob: DS:2181038080:[72075186224037889:1:98:36:0:7648:0] Offset: 0 Size: 7648 } 2024-11-21T23:12:13.891943Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439874348684921584:3672], TxId: 281474976715773, task: 63. Ctx: { TraceId : 01jd8fyprz6s47eacjg044efss. SessionId : ydb://session/3?node_id=47&id=OWZmZTYwOTMtNjJmYTYwNTYtYzMwNjk3M2EtYjU0NjQyMjc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. 
Send stats to executor actor [47:7439874348684921494:3599] TaskId: 63 Stats: CpuTimeUs: 771 Tasks { TaskId: 63 CpuTimeUs: 319 ComputeCpuTimeUs: 4 BuildCpuTimeUs: 315 HostName: "ghrun-uc25mmfz34" NodeId: 47 StartTimeMs: 1732230733390 } MaxMemoryUsage: 1048576 2024-11-21T23:12:13.891951Z node 47 :BLOB_CACHE DEBUG: fline=blob_cache.cpp:234;ask={ Blob: DS:2181038080:[72075186224037889:1:98:36:0:7648:0] Offset: 0 Size: 7648 }; 2024-11-21T23:12:13.891975Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439874348684921575:3663], TxId: 281474976715773, task: 54. Ctx: { SessionId : ydb://session/3?node_id=47&id=OWZmZTYwOTMtNjJmYTYwNTYtYzMwNjk3M2EtYjU0NjQyMjc=. CustomerSuppliedId : . TraceId : 01jd8fyprz6s47eacjg044efss. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. wakeup with tag 2 2024-11-21T23:12:13.892002Z node 47 :BLOB_CACHE DEBUG: Miss cache: { Blob: DS:2181038080:[72075186224037889:1:98:36:0:7648:0] Offset: 0 Size: 7648 } sender:[47:7439874348684921824:3765] 2024-11-21T23:12:13.892003Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439874348684921494:3599] TxId: 281474976715773. Ctx: { TraceId: 01jd8fyprz6s47eacjg044efss, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OWZmZTYwOTMtNjJmYTYwNTYtYzMwNjk3M2EtYjU0NjQyMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [47:7439874348684921561:3652], CA [47:7439874348684921527:3631], CA [47:7439874348684921545:3642], CA [47:7439874348684921580:3668], CA [47:7439874348684921575:3663], CA [47:7439874348684921563:3653], CA [47:7439874348684921531:3634], CA [47:7439874348684921578:3666], CA [47:7439874348684921550:3645], CA [47:7439874348684921567:3656], CA [47:7439874348684921573:3661], CA [47:7439874348684921508:3616], CA [47:7439874348684921503:3611], CA [47:7439874348684921576:3664], CA [47:7439874348684921571:3659], CA [47:7439874348684921506:3614], CA [47:7439874348684921584:3672], CA [47:7439874348684921552:3646], CA [47:7439874348684921509:3617], CA [47:7439874348684921504:3612], CA [47:7439874348684921537:3638], CA [47:7439874348684921582:3670], CA [47:7439874348684921557:3649], CA [47:7439874348684921534:3636], CA [47:7439874348684921586:3673], CA [47:7439874348684921522:3628], CA [47:7439874348684921516:3623], CA [47:7439874348684921540:3639], CA [47:7439874348684921520:3626], CA [47:7439874348684921514:3621], CA [47:7439874348684921524:3629], CA [47:7439874348684921518:3624], CA [47:7439874348684921542:3640], CA [47:7439874348684921528:3632], CA [47:7439874348684921547:3643], CA [47:7439874348684921564:3654], CA [47:7439874348684921533:3635], CA [47:7439874348684921544:3641], CA [47:7439874348684921579:3667], CA [47:7439874348684921574:3662], CA [47:7439874348684921530:3633], CA [47:7439874348684921569:3657], CA [47:7439874348684921577:3665], CA [47:7439874348684921549:3644], CA [47:7439874348684921572:3660], CA [47:7439874348684921566:3655], CA [47:7439874348684921501:3610], CA [47:7439874348684921507:3615], CA [47:7439874348684921554:3647], CA [47:7439874348684921570:3658], CA [47:7439874348684921510:3618], CA [47:7439874348684921505:3613], CA [47:7439874348684921583:3671], CA [47:7439874348684921558:3650], CA [47:7439874348684921536:3637], CA [47:7439874348684921587:3674], CA [47:7439874348684921581:3669], CA [47:7439874348684921555:3648], CA [47:7439874348684921512:3619], CA [47:7439874348684921560:3651], CA [47:7439874348684921515:3622], CA [47:7439874348684921521:3627], CA [47:7439874348684921525:3630], CA [47:7439874348684921519:3625], CA 
[47:7439874348684921513:3620], 2024-11-21T23:12:13.892047Z node 47 :BLOB_CACHE DEBUG: Enqueue read range: { Blob: DS:2181038080:[72075186224037889:1:98:36:0:7648:0] Offset: 0 Size: 7648 } 2024-11-21T23:12:13.892079Z node 47 :KQP_COMPUTE DEBUG: SelfId: [47:7439874348684921575:3663], TxId: 281474976715773, task: 54. Ctx: { SessionId : ydb://session/3?node_id=47&id=OWZmZTYwOTMtNjJmYTYwNTYtYzMwNjk3M2EtYjU0NjQyMjc=. CustomerSuppliedId : . TraceId : 01jd8fyprz6s47eacjg044efss. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [47:7439874348684921494:3599] TaskId: 54 Stats: CpuTimeUs: 1113 Tasks { TaskId: 54 CpuTimeUs: 300 ComputeCpuTimeUs: 16 BuildCpuTimeUs: 284 HostName: "ghrun-uc25mmfz34" NodeId: 47 StartTimeMs: 1732230733397 } MaxMemoryUsage: 1048576 2024-11-21T23:12:13.892096Z node 47 :BLOB_CACHE DEBUG: Sending read from BlobCache: group: 2181038080 ranges: { Blob: DS:2181038080:[72075186224037889:1:98:36:0:7648:0] Offset: 0 Size: 7648 } cookie: 234 2024-11-21T23:12:13.892107Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439874348684921494:3599] TxId: 281474976715773. Ctx: { TraceId: 01jd8fyprz6s47eacjg044efss, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OWZmZTYwOTMtNjJmYTYwNTYtYzMwNjk3M2EtYjU0NjQyMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [47:7439874348684921572:3660], task: 51, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 897 Tasks { TaskId: 51 CpuTimeUs: 349 ComputeCpuTimeUs: 4 BuildCpuTimeUs: 345 HostName: "ghrun-uc25mmfz34" NodeId: 47 StartTimeMs: 1732230733384 } MaxMemoryUsage: 1048576 } 2024-11-21T23:12:13.892148Z node 47 :BLOB_CACHE DEBUG: Batch read request: { Blob: DS:2181038080:[72075186224037889:1:83:21:0:7648:0] Offset: 0 Size: 7648 } 2024-11-21T23:12:13.892185Z node 47 :BLOB_CACHE DEBUG: fline=blob_cache.cpp:234;ask={ Blob: DS:2181038080:[72075186224037889:1:83:21:0:7648:0] Offset: 0 Size: 7648 }; 2024-11-21T23:12:13.892200Z node 47 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2024-11-21T23:12:13.892217Z node 47 :BLOB_CACHE DEBUG: Miss cache: { Blob: DS:2181038080:[72075186224037889:1:83:21:0:7648:0] Offset: 0 Size: 7648 } sender:[47:7439874348684921825:3766] 2024-11-21T23:12:13.892227Z node 47 :KQP_EXECUTER DEBUG: ActorId: [47:7439874348684921494:3599] TxId: 281474976715773. Ctx: { TraceId: 01jd8fyprz6s47eacjg044efss, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=47&id=OWZmZTYwOTMtNjJmYTYwNTYtYzMwNjk3M2EtYjU0NjQyMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [47:7439874348684921561:3652], CA [47:7439874348684921527:3631], CA [47:7439874348684921545:3642], CA [47:7439874348684921580:3668], CA [47:7439874348684921575:3663], CA [47:7439874348684921563:3653], CA [47:7439874348684921531:3634], CA [47:7439874348684921578:3666], CA [47:7439874348684921550:3645], CA [47:7439874348684921567:3656], CA [47:7439874348684921573:3661], CA [47:7439874348684921508:3616], CA [47:7439874348684921503:3611], CA [47:7439874348684921576:3664], CA [47:7439874348684921571:3659], CA [47:7439874348684921506:3614], CA [47:7439874348684921584:3672], CA [47:7439874348684921552:3646], CA [47:7439874348684921509:3617], CA [47:7439874348684921504:3612], CA [47:7439874348684921537:3638], CA [47:7439874348684921582:3670], CA [47:7439874348684921557:3649], CA [47:7439874348684921534:3636], CA [47:7439874348684921586:3673], CA [47:7439874348684921522:3628], CA [47:7439874348684921516:3623], CA [47:7439874348684921540:3639], CA [47:7439874348684921520:3626], CA [47:7439874348684921514:3621], CA [47:7439874348684921524:3629], CA [47:7439874348684921518:3624], CA [47:7439874348684921542:3640], CA [47:7439874348684921528:3632], CA [47:7439874348684921547:3643], CA [47:7439874348684921564:3654], CA [47:7439874348684921533:3635], CA [47:7439874348684921544:3641], CA [47:7439874348684921579:3667], CA [47:7439874348684921574:3662], CA [47:7439874348684921530:3633], CA [47:7439874348684921569:3657], CA [47:7439874348684921577:3665], CA [47:7439874348684921549:3644], CA [47:7439874348684921572:3660], CA [47:7439874348684921566:3655], CA [47:7439874348684921501:3610], CA [47:7439874348684921507:3615], CA [47:7439874348684921554:3647], CA [47:7439874348684921570:3658], CA [47:7439874348684921510:3618], CA [47:7439874348684921505:3613], CA [47:7439874348684921583:3671], CA [47:7439874348684921558:3650], CA [47:7439874348684921536:3637], CA [47:7439874348684921587:3674], CA [47:7439874348684921581:3669], CA [47:7439874348684921555:3648], CA [47:7439874348684921512:3619], CA [47:7439874348684921560:3651], CA [47:7439874348684921515:3622], CA [47:7439874348684921521:3627], CA [47:7439874348684921525:3630], CA [47:7439874348684921519:3625], CA [47:7439874348684921513:3620], 2024-11-21T23:12:13.892248Z node 47 :BLOB_CACHE DEBUG: Enqueue read range: { Blob: DS:2181038080:[72075186224037889:1:83:21:0:7648:0] Offset: 0 Size: 7648 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 13258, MsgBus: 7572 2024-11-21T23:12:15.062411Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874357256675046:2205];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:15.062632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021ce/r3tmp/tmpqFsUr7/pdisk_1.dat 2024-11-21T23:12:15.588945Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:15.602239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:15.602357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:15.607306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 13258, node 1 2024-11-21T23:12:15.700517Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:15.700549Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:15.700564Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:15.700679Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7572 TClient is connected to server localhost:7572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:16.250257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.265806Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:16.279132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.404524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:16.548634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.629345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:18.641364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874370141578508:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.641482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.916808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.948381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.977020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.005828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.033209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.069443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.125533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874374436546299:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.125620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.125829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874374436546304:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.129024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:19.142338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874374436546306:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:20.062015Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874357256675046:2205];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:20.062079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |83.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 7276, MsgBus: 8819 2024-11-21T23:12:16.262802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874363928459359:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:16.263088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00209d/r3tmp/tmpC0ETwE/pdisk_1.dat 2024-11-21T23:12:16.661049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:16.661155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:16.663848Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:16.667302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7276, node 1 2024-11-21T23:12:16.830926Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:16.830948Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:16.830954Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:16.831053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8819 TClient is connected to server localhost:8819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:17.555989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:17.576592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.711264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.870286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.964820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.508581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874376813362803:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.508666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.740875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.771662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.804668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.877925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.914964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.974712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.105207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874381108330597:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.105326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.105615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874381108330602:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.110442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:20.123282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874381108330604:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:21.265518Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874363928459359:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:21.266120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:21.609174Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230741600, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 8569, MsgBus: 13409 2024-11-21T23:12:15.978290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874356927798933:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:15.978346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0020a7/r3tmp/tmpbP8f6T/pdisk_1.dat 2024-11-21T23:12:16.316035Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8569, node 1 2024-11-21T23:12:16.384519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:16.384644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:16.385983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:16.390659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:16.390679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:16.390686Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:16.390819Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13409 TClient is connected to server localhost:13409 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:17.021275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.037865Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:17.045707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.173460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:17.342141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:17.408861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.377027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874374107669820:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.377163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.728112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.755275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.859659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.892579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.922669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.966116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:20.035562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874378402637614:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.035649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.035902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874378402637619:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:20.039862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:20.053315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874378402637621:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:20.981603Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874356927798933:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:20.981673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 22104, MsgBus: 1513 2024-11-21T23:12:17.889815Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874366136793726:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:17.890961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00204f/r3tmp/tmpqk846n/pdisk_1.dat 2024-11-21T23:12:18.252398Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:18.282370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:18.282505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22104, node 1 2024-11-21T23:12:18.287930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:18.357933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:18.357969Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:18.357975Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:18.358081Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1513 TClient is connected to server localhost:1513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:18.953772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:18.969695Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:18.980743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.171112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.317281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.378014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:21.584178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874383316664625:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:21.595250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:21.631604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:21.714939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:21.788915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:21.864874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:21.967140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.001559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.063126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874387611632428:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.063222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.063892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874387611632433:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.067574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:22.078954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874387611632435:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:22.940856Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874366136793726:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:22.952982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:23.153489Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439874391906600055:3633], for# user0@builtin, access# DescribeSchema 2024-11-21T23:12:23.153523Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7439874391906600055:3633], for# user0@builtin, access# DescribeSchema 2024-11-21T23:12:23.163141Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439874391906600052:2465], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:12:23.164695Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjQ0ZWIwNC1jOTE2NGY4OS00YzAyMzA4Zi00NzZjMDY5OQ==, ActorId: [1:7439874391906600045:2461], ActorState: ExecuteState, TraceId: 01jd8fz12yct3pe59nx2t55fyr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 1304, MsgBus: 17986 2024-11-21T23:12:18.769556Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874371103518788:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:18.769596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002022/r3tmp/tmpwRC9nm/pdisk_1.dat 2024-11-21T23:12:19.127144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:19.127248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:19.129134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:19.145404Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1304, node 1 2024-11-21T23:12:19.210240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:19.210258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:19.210266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:19.210373Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17986 TClient is connected to server localhost:17986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:19.759589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:19.798166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.948469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:20.172864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:20.259752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.295446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874388283389618:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.295587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.549205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.587261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.622764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.663754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.694855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.734975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.830788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874388283390114:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.830888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.831114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874388283390119:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.835984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:22.850800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874388283390121:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:23.769636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874371103518788:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:23.769740Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:24.466617Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230744451, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 9880, MsgBus: 27562 2024-11-21T23:12:09.085095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874332361998623:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:09.085141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021f9/r3tmp/tmpohOcUq/pdisk_1.dat 2024-11-21T23:12:09.616320Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9880, node 1 2024-11-21T23:12:09.662965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:09.663083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:09.666943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:09.709703Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:09.709728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:09.709741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:09.709816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27562 TClient is connected to server localhost:27562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:10.426930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.442020Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:10.451480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.598355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.798313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:10.890004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:12.696874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874345246902155:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:12.722089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:13.046803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:13.085606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:13.156751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:13.197665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:13.232825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:13.268145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:13.336872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874349541869953:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:13.336946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:13.337059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874349541869958:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:13.340744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:13.349676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874349541869960:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:14.085273Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874332361998623:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:14.085355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:14.650259Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230734642, txId: 281474976710671] shutting down waiting... 2024-11-21T23:12:15.844054Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230735834, txId: 281474976710673] shutting down waiting... 2024-11-21T23:12:17.019655Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230737009, txId: 281474976710675] shutting down waiting... 2024-11-21T23:12:18.209062Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230738200, txId: 281474976710677] shutting down waiting... 2024-11-21T23:12:19.375518Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230739369, txId: 281474976710679] shutting down waiting... 2024-11-21T23:12:20.613263Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230740602, txId: 281474976710681] shutting down waiting... 2024-11-21T23:12:21.798046Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230741788, txId: 281474976710683] shutting down waiting... 2024-11-21T23:12:22.966441Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230742954, txId: 281474976710685] shutting down waiting... 2024-11-21T23:12:24.156706Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230744142, txId: 281474976710687] shutting down 2024-11-21T23:12:24.615758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:12:24.615785Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:24.651636Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230744630, txId: 281474976710689] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:142:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:144:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:146:2057] recipient: [4:145:2166] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:148:2057] recipient: [4:145:2166] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:147:2167] Leader for TabletID 72057594037927937 is [4:147:2167] sender: [4:217:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:147:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:150:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:149:2171] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:153:2057] recipient: [5:149:2171] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:152:2172] Leader for TabletID 72057594037927937 is [5:152:2172] sender: [5:222:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:147:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:150:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:149:2171] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:153:2057] recipient: [6:149:2171] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:152:2172] Leader for TabletID 72057594037927937 is [6:152:2172] sender: [6:222:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:149:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:153:2057] recipient: [7:152:2172] Leader for TabletID 72057594037927937 is [7:154:2173] sender: [7:155:2057] recipient: [7:152:2172] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:154:2173] Leader for TabletID 72057594037927937 is [7:154:2173] sender: [7:224:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:154:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:156:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:158:2057] recipient: [8:157:2177] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:160:2057] recipient: [8:157:2177] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:159:2178] Leader for TabletID 72057594037927937 is [8:159:2178] sender: [8:229:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:154:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:158:2057] recipient: [9:156:2177] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:160:2057] recipient: [9:156:2177] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:159:2178] Leader for TabletID 72057594037927937 is [9:159:2178] sender: [9:229:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:159:2057] recipient: [10:158:2177] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:161:2057] recipient: [10:158:2177] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:160:2178] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:230:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:160:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:164:2057] recipient: [11:163:2182] Leader for TabletID 72057594037927937 is [11:165:2183] sender: [11:166:2057] recipient: [11:163:2182] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:165:2183] Leader for TabletID 72057594037927937 is [11:165:2183] sender: [11:235:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for ... TabletID 72057594037927937 is [44:178:2194] sender: [44:248:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:101:2057] recipient: [45:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:101:2057] recipient: [45:99:2133] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:106:2057] recipient: [45:99:2133] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:139:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:176:2057] recipient: [45:97:2132] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:179:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [45:105:2137] sender: [45:180:2057] recipient: [45:178:2195] Leader for TabletID 72057594037927937 is [45:181:2196] sender: [45:182:2057] recipient: [45:178:2195] !Reboot 72057594037927937 (actor [45:105:2137]) rebooted! !Reboot 72057594037927937 (actor [45:105:2137]) tablet resolver refreshed! new actor is[45:181:2196] Leader for TabletID 72057594037927937 is [45:181:2196] sender: [45:229:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:101:2057] recipient: [46:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:101:2057] recipient: [46:99:2133] Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:106:2057] recipient: [46:99:2133] Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:139:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:178:2057] recipient: [46:97:2132] Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:181:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [46:105:2137] sender: [46:182:2057] recipient: [46:180:2197] Leader for TabletID 72057594037927937 is [46:183:2198] sender: [46:184:2057] recipient: [46:180:2197] !Reboot 72057594037927937 (actor [46:105:2137]) rebooted! !Reboot 72057594037927937 (actor [46:105:2137]) tablet resolver refreshed! 
new actor is[46:183:2198] Leader for TabletID 72057594037927937 is [46:183:2198] sender: [46:253:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:101:2057] recipient: [47:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:101:2057] recipient: [47:99:2133] Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:106:2057] recipient: [47:99:2133] Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:139:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:178:2057] recipient: [47:97:2132] Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:181:2057] recipient: [47:180:2197] Leader for TabletID 72057594037927937 is [47:105:2137] sender: [47:182:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:183:2198] sender: [47:184:2057] recipient: [47:180:2197] !Reboot 72057594037927937 (actor [47:105:2137]) rebooted! !Reboot 72057594037927937 (actor [47:105:2137]) tablet resolver refreshed! new actor is[47:183:2198] Leader for TabletID 72057594037927937 is [47:183:2198] sender: [47:253:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:106:2057] recipient: [48:99:2133] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:139:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:181:2057] recipient: [48:97:2132] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:183:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:185:2057] recipient: [48:184:2199] Leader for TabletID 72057594037927937 is [48:186:2200] sender: [48:187:2057] recipient: [48:184:2199] !Reboot 72057594037927937 (actor [48:105:2137]) rebooted! !Reboot 72057594037927937 (actor [48:105:2137]) tablet resolver refreshed! new actor is[48:186:2200] Leader for TabletID 72057594037927937 is [48:186:2200] sender: [48:234:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:106:2057] recipient: [49:99:2133] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:139:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:183:2057] recipient: [49:97:2132] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:186:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:187:2057] recipient: [49:185:2201] Leader for TabletID 72057594037927937 is [49:188:2202] sender: [49:189:2057] recipient: [49:185:2201] !Reboot 72057594037927937 (actor [49:105:2137]) rebooted! !Reboot 72057594037927937 (actor [49:105:2137]) tablet resolver refreshed! 
new actor is[49:188:2202] Leader for TabletID 72057594037927937 is [49:188:2202] sender: [49:258:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:101:2057] recipient: [50:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:101:2057] recipient: [50:99:2133] Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:106:2057] recipient: [50:99:2133] Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:139:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:183:2057] recipient: [50:97:2132] Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:185:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [50:105:2137] sender: [50:187:2057] recipient: [50:186:2201] Leader for TabletID 72057594037927937 is [50:188:2202] sender: [50:189:2057] recipient: [50:186:2201] !Reboot 72057594037927937 (actor [50:105:2137]) rebooted! !Reboot 72057594037927937 (actor [50:105:2137]) tablet resolver refreshed! new actor is[50:188:2202] Leader for TabletID 72057594037927937 is [50:188:2202] sender: [50:258:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:101:2057] recipient: [51:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:101:2057] recipient: [51:99:2133] Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:106:2057] recipient: [51:99:2133] Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:139:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:186:2057] recipient: [51:97:2132] Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:189:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [51:105:2137] sender: [51:190:2057] recipient: [51:188:2203] Leader for TabletID 72057594037927937 is [51:191:2204] sender: [51:192:2057] recipient: [51:188:2203] !Reboot 72057594037927937 (actor [51:105:2137]) rebooted! !Reboot 72057594037927937 (actor [51:105:2137]) tablet resolver refreshed! new actor is[51:191:2204] Leader for TabletID 72057594037927937 is [51:191:2204] sender: [51:239:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:101:2057] recipient: [52:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:101:2057] recipient: [52:99:2133] Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:106:2057] recipient: [52:99:2133] Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:139:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:188:2057] recipient: [52:97:2132] Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:190:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [52:105:2137] sender: [52:192:2057] recipient: [52:191:2205] Leader for TabletID 72057594037927937 is [52:193:2206] sender: [52:194:2057] recipient: [52:191:2205] !Reboot 72057594037927937 (actor [52:105:2137]) rebooted! !Reboot 72057594037927937 (actor [52:105:2137]) tablet resolver refreshed! 
new actor is[52:193:2206] Leader for TabletID 72057594037927937 is [52:193:2206] sender: [52:263:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:101:2057] recipient: [53:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:101:2057] recipient: [53:99:2133] Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:106:2057] recipient: [53:99:2133] Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:139:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:105:2137]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:188:2057] recipient: [53:97:2132] Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:191:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:105:2137] sender: [53:192:2057] recipient: [53:190:2205] Leader for TabletID 72057594037927937 is [53:193:2206] sender: [53:194:2057] recipient: [53:190:2205] !Reboot 72057594037927937 (actor [53:105:2137]) rebooted! !Reboot 72057594037927937 (actor [53:105:2137]) tablet resolver refreshed! new actor is[53:193:2206] Leader for TabletID 72057594037927937 is [53:193:2206] sender: [53:263:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:101:2057] recipient: [54:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:101:2057] recipient: [54:99:2133] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:106:2057] recipient: [54:99:2133] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:139:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:191:2057] recipient: [54:97:2132] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:193:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:195:2057] recipient: [54:194:2207] Leader for TabletID 72057594037927937 is [54:196:2208] sender: [54:197:2057] recipient: [54:194:2207] !Reboot 72057594037927937 (actor [54:105:2137]) rebooted! !Reboot 72057594037927937 (actor [54:105:2137]) tablet resolver refreshed! 
new actor is[54:196:2208] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:106:2057] recipient: [55:99:2133] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:139:2057] recipient: [55:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 23731, MsgBus: 22512 2024-11-21T23:12:18.323683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874373076461178:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:18.323987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002031/r3tmp/tmpPpjKOe/pdisk_1.dat 2024-11-21T23:12:18.723774Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:18.741690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:18.741860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:18.743938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23731, node 1 2024-11-21T23:12:18.816579Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:18.816609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:18.816616Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:18.816731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22512 TClient is connected to server localhost:22512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:19.355463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:19.379696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:19.551044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.714833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.806903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:21.785362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874385961364618:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:21.785512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.040531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.075563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.114875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.144115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.180901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.216308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:22.291967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874390256332410:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.292087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.292252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874390256332415:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:22.295569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:22.306608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874390256332417:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:23.315725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874373076461178:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:23.315811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:23.650708Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230743638, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 17815, MsgBus: 1568 2024-11-21T23:12:15.673894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874357209479350:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:15.680896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00213f/r3tmp/tmpKLVkiy/pdisk_1.dat 2024-11-21T23:12:16.133365Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17815, node 1 2024-11-21T23:12:16.139231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:16.139361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:16.144047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:16.218963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:16.218993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:16.219045Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:16.219127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1568 TClient is connected to server localhost:1568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:16.749693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.797022Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:16.812828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.992433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.175189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:17.247065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.106228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874374389350096:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.106416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.373358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.415292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.452604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.496694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.572729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.655734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.729473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874374389350601:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.729616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.730231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874374389350606:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.734929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:19.755919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874374389350608:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:20.691957Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874357209479350:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:20.692106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |83.5%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.5%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds |83.5%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |83.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLocks::TwoPhaseTx >> LocalPartition::WithoutPartitionDeadNode [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> KqpTx::RollbackByIdle >> KqpTx::RollbackTx2 >> KqpTx::RollbackTx >> TxUsage::WriteToTopic_Demo_43 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: Trying to start YDB, gRPC: 61581, MsgBus: 19779 2024-11-21T23:12:15.150130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874355981189037:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:15.150210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021cd/r3tmp/tmpEw6rGE/pdisk_1.dat 2024-11-21T23:12:15.602720Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61581, node 1 2024-11-21T23:12:15.645237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:15.645498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:15.650146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:15.687141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:15.687198Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:15.687208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:15.687298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19779 TClient is connected to server localhost:19779 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:16.240076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.252161Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:16.264876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:16.400999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:12:16.553116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:16.624459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:18.584371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874368866092421:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.584492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:18.859179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.888071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.922114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.957338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:18.986673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.027245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:19.075851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874373161060211:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.075924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.076078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874373161060216:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:19.079602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:19.089212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874373161060218:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:20.149461Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874355981189037:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:20.149539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 ydb-cpp-sdk/2.6.2 2024-11-21T23:12:25.435747Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230745424, txId: 281474976710683] shutting down >> KqpTx::TooManyTx >> KqpLocks::InvalidateOnCommit |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> LocalPartition::DescribeBadPartition [GOOD] >> LocalPartition::DescribeHang >> TExportToS3Tests::CheckItemProgress >> KqpLocksTricky::TestNoLocksIssue-withSink >> KqpTx::CommitRequired >> TExportToS3Tests::DropSourceTableBeforeTransferring >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> KqpTx::RollbackManyTx >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRewriteThenLastValueNewApi |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 >> KqpSinkTx::OlapLocksAbortOnCommit >> KqpSinkTx::OlapExplicitTcl |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TExportToS3Tests::UidAsIdempotencyKey >> TExportToS3Tests::RebootDuringCompletion >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> 
TExportToS3Tests::UserSID >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::RebootDuringAbortion >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ExportStartTime >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TxUsage::WriteToTopic_Demo_15 [GOOD] >> KqpTx::RollbackByIdle [GOOD] >> KqpSystemView::NodesRange1 [GOOD] >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight [GOOD] >> BasicUsage::ReadSessionCorrectClose |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TExportToS3Tests::CorruptedDyNumber [GOOD] |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |83.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::TablePermissions [GOOD] >> TxUsage::WriteToTopic_Demo_16 >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> KqpTx::TooManyTx [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> KqpTx::RollbackTx2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackByIdle [GOOD] Test command err: Trying to start YDB, gRPC: 18589, MsgBus: 13763 2024-11-21T23:12:27.786225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874407655301852:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:27.786271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011dc/r3tmp/tmpSOGXpX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18589, node 1 2024-11-21T23:12:28.184046Z node 
1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:28.184090Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:28.215270Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:28.224192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:28.224754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:28.230073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:28.239744Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:28.239771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:28.239796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:28.239894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13763 TClient is connected to server localhost:13763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:28.828852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:28.870439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.019924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.181317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.249535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:31.482563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874424835172552:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:31.504098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:31.531051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:31.568567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:31.600430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:31.635070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:31.728982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:31.772272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:31.872941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874424835173053:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:31.873020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:31.873368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874424835173058:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:31.876824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:31.893834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874424835173060:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:32.785355Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874407655301852:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:32.785409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:32.988979Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODU2ZDUwMTctODAyMGE1OTItNzQ4NDlkMzAtYzIyMzJmOTY=, ActorId: [1:7439874429130140668:2460], ActorState: ReadyState, TraceId: 01jd8fzapk4azhv7dh27ndq9s6, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:58.068539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:58.068641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:58.068691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:58.068734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:08:58.068796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:58.068826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:58.068914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:58.069256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:58.149111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:58.149168Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:58.158907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:58.159299Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:58.159477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:58.170897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:58.171287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:58.171916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:58.172539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:58.175567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:58.176836Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:58.176888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:58.176957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:58.176999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:58.177032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:58.177277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:58.183986Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:58.310506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:58.310768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.310988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:58.311246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:58.311311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.313632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:58.313750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:58.313979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.314049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:58.314101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:58.314131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:58.315987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.316039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:58.316075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:58.317672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.317710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.317747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:58.317778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:58.320997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:58.323200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:58.323397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:08:58.324453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:58.324597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:58.324647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:58.324929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:58.325008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:58.325195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:58.325283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:58.327500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:58.327548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:58.327729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:58.327768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:58.328097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.328135Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:58.328236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:58.328268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:58.328305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:58.328373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:58.328404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:58.328430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:58.328491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:58.328523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:58.328548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
ult: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:32.817448Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:12:32.817778Z 
node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 382us result status StatusSuccess 2024-11-21T23:12:32.818850Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: 
"\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::MixedTxFail >> KqpTx::RollbackTx [GOOD] >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocksTricky::TestNoLocksIssue+withSink |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |83.6%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> TxUsage::WriteToTopic_Demo_41 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 25843, MsgBus: 9287 2024-11-21T23:12:17.191574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874367971706309:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:17.191630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:17.210791Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874366143271648:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:17.210837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:17.233577Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439874367212958527:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:17.275031Z node 5 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439874366761601705:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:17.275116Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:17.614960Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002087/r3tmp/tmpAIyaCW/pdisk_1.dat 2024-11-21T23:12:17.757433Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:18.238455Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:18.294331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:18.295565Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:18.301295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:18.301451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:18.305707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:18.305814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:18.307126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:18.307251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:18.307360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:18.307433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:18.308065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:18.308107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:18.317801Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T23:12:18.317835Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T23:12:18.317867Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:12:18.318003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:18.320632Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:12:18.320958Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:18.323249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:18.323444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:18.323592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:18.358907Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25843, node 1 2024-11-21T23:12:18.393226Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:18.393712Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:18.559599Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:18.559632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:18.559677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:18.559812Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9287 TClient is connected to server localhost:9287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:19.478331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.542987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.770422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:19.984106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:20.150934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:22.194438Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874367971706309:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:22.194519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:22.211082Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874366143271648:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:22.211144Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:22.230991Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439874367212958527:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:22.231064Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:22.276854Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439874366761601705:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:22.276930Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:23.640050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874393741511777:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:23.640256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:23.994284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:24.151179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:24.425907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:24.594270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:24.809629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:24.909251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:25.055214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874402331447032:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:25.055301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:25.055514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874402331447037:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:25.069679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:25.134672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874402331447039:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720668 completed, doublechecking } 2024-11-21T23:12:28.708873Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230748563, txId: 281474976720671] shutting down 2024-11-21T23:12:29.297826Z node 5 :BS_PROXY_PUT ERROR: [c4789191979eeddd] Result# TEvPutResult {Id# [72075186224037896:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:12:29.318070Z node 4 :BS_PROXY_PUT ERROR: [8413b4d04df99ca3] Result# TEvPutResult {Id# [72075186224037900:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:12:29.378911Z node 3 :BS_PROXY_PUT ERROR: [44388d147aeebf80] Result# TEvPutResult {Id# [72075186224037891:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitRoTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDyNumber [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:29.937975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:29.938069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.938103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:29.938152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:29.938195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:29.938242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:29.938305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.938660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:30.021117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:30.021174Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.059490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:30.066594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:30.066783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:30.079489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:30.080190Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:30.080823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.081139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:30.096992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.098354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.098441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.098693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:30.098748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.098788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:30.098890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.119245Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:30.304903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:30.305183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.305416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:30.305709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:30.305786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.311874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.312016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:30.312252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.312345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:30.312391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:30.312424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 
-> 3 2024-11-21T23:12:30.319862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.319961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:30.320015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:30.322237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.322302Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.322356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.322446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.331221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:30.333386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:30.333597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:30.334670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.334829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:30.334883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.335176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:30.335250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.335411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.335517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:30.340407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.340486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.340687Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.340733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:30.341189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.341248Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:30.341388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:30.341426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.341471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:30.341512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.341555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:30.341587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:30.341668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:30.341708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:30.341745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:30.343715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.343838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.343875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:30.343915Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:30.343958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.344065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
2:33.765019Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.765079Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T23:12:33.767201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.767338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.767386Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:33.767465Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:33.767599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:33.769296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T23:12:33.769391Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2024-11-21T23:12:33.769955Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:33.770049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:33.770100Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T23:12:33.770209Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T23:12:33.770328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2024-11-21T23:12:34.075144Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:34.075202Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:12:34.075494Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:34.075539Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2024-11-21T23:12:34.076067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.076165Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:23886 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0EFD19BE-E11A-47AC-9309-8F059FF5382E amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: Erasing txId 281474976710759 2024-11-21T23:12:34.078198Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T23:12:34.082677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T23:12:34.082760Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T23:12:34.082809Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T23:12:34.082858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:12:34.082988Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2024-11-21T23:12:34.092404Z node 3 :DATASHARD_BACKUP ERROR: [Export] [scanner] Error read data from table: Invalid DyNumber binary representation REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:23886 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8C3B985C-D318-40B4-8BE0-7A4FE849D8AA amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:12:34.127586Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2024-11-21T23:12:34.154150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:34.154245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 
72075186233409547, partId: 0 2024-11-21T23:12:34.154439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:34.154607Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 434 RawX2: 12884904293 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid DyNumber binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:34.154692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:34.159058Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.159177Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:12:34.159268Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T23:12:34.159891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:34.165025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.168490Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.168563Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T23:12:34.169033Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T23:12:34.169075Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:34.169123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T23:12:34.169194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T23:12:34.169259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:34.169300Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T23:12:34.169335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T23:12:34.169777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:12:34.175186Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T23:12:34.175321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T23:12:34.175596Z node 3 :EXPORT NOTICE: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 3] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2] State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid DyNumber binary representation' } 2024-11-21T23:12:34.183752Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:12:34.183855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:460:2424] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportStartTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:30.259658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:30.259767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.259803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:30.259838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:30.259885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:30.259921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:30.259994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.260458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:30.325536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:30.325589Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.342632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:30.346601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:30.346789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2024-11-21T23:12:30.354970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:30.355613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:30.356252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.356573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:30.361642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.363004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.363082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.363333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:30.363407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.363448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:30.363589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.370220Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:30.509907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:30.510160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.510356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:30.510924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:30.511007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.515364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.515491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:30.515722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.515813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:30.515864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2024-11-21T23:12:30.515898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:30.519447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.519511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:30.519553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:30.522538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.522597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.522653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.522719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.526848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:30.529580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:30.529810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:30.530903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.531039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:30.531091Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.531409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:30.531478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.531657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.531789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:30.534087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.534141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 
1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.534375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.534441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:30.534826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.534889Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:30.535012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:30.535056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.535106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:30.535169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.535208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:30.535244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:30.535315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:30.535375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:30.535416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:30.537583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.537695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.537736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:30.537773Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:30.537817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.537944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
23:12:34.433948Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710757 2024-11-21T23:12:34.443253Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2024-11-21T23:12:34.445050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpCreateConsistentCopyTables CreateConsistentCopyTables { CopyTableDescriptions { SrcPath: "/MyRoot/Table" DstPath: "/MyRoot/export-102/0" OmitIndexes: true OmitFollowers: true IsBackup: true } } Internal: true } TxId: 281474976710758 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:34.445282Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/export-102/0, opId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.445629Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 3], parent name: export-102, child name: 0, child id: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:12:34.445696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 0 2024-11-21T23:12:34.445729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:12:34.445774Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T23:12:34.445848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:12:34.445999Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710758:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:34.446497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:12:34.446564Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:12:34.449390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710758, response: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2024-11-21T23:12:34.449463Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710758, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /MyRoot/export-102/0 2024-11-21T23:12:34.449580Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710758, status# StatusAccepted 2024-11-21T23:12:34.449614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710758 SchemeshardId: 72057594046678944 PathId: 4 2024-11-21T23:12:34.449680Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:34.449700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:12:34.449810Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:12:34.449859Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:34.449883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 3 2024-11-21T23:12:34.449907Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2024-11-21T23:12:34.450405Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.450484Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710758:0 ProgressState, operation type: TxCopyTable, at tablet72057594046678944 2024-11-21T23:12:34.450721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710758:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T23:12:34.451171Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T23:12:34.451245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T23:12:34.451271Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2024-11-21T23:12:34.451297Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T23:12:34.451326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:12:34.451616Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T23:12:34.451690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046678944, cookie: 281474976710758 2024-11-21T23:12:34.451713Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2024-11-21T23:12:34.451739Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 1 2024-11-21T23:12:34.451761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:12:34.451813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2024-11-21T23:12:34.454090Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2024-11-21T23:12:34.454129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2024-11-21T23:12:34.454158Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2024-11-21T23:12:34.458326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2024-11-21T23:12:34.458476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72057594037968897 2024-11-21T23:12:34.458513Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T23:12:34.458817Z node 3 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T23:12:34.459022Z node 3 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2024-11-21T23:12:34.459214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T23:12:34.459257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T23:12:34.459379Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T23:12:34.459424Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T23:12:34.459481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T23:12:34.459567Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 2 -> 3 2024-11-21T23:12:34.460703Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2024-11-21T23:12:34.460886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2024-11-21T23:12:34.463305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.463509Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 
2024-11-21T23:12:34.463573Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2024-11-21T23:12:34.463624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:2 at tablet# 72057594046678944 2024-11-21T23:12:34.467151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T23:12:34.467265Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2024-11-21T23:12:34.467333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409547 2024-11-21T23:12:34.467357Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:29.507056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:29.507161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.507199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:29.507254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:29.507304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:29.507342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:29.507417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.507782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:29.580945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:29.581026Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:29.597694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:29.601960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:29.602163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:29.609663Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:29.610411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:29.611091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:29.611460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:29.616310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.617748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:29.617819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.618085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:29.618160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:29.618206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:29.618335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.643725Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:29.813126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:29.813327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.813513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:29.813708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:29.813751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.816233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:29.816379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:29.816597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.816684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:29.816734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2024-11-21T23:12:29.816772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:29.818807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.818868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:29.818907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:29.820775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.820822Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.820875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:29.820954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:29.824702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:29.827574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:29.827797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:29.828736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:29.828874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:29.828923Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:29.829208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:29.829263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:29.829424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:29.829500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:29.831494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:29.831546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T23:12:29.831736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.831787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:29.832140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.832203Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:29.832320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:29.832361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:29.832407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:29.832446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:29.832483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:29.832514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:29.832581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:29.832617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:29.832647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:29.834412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:29.834519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:29.834556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:29.834591Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:29.834630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:29.834720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
G: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.702161Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T23:12:33.709907Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.710071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.710127Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:33.710214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:33.710369Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:33.713233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T23:12:33.713378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2024-11-21T23:12:33.714567Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:33.714684Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:33.714749Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T23:12:33.714863Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T23:12:33.714996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:9121 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 639B4455-B0E7-4CB5-96E1-98D072BDD20F amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 
2024-11-21T23:12:33.812223Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:33.812277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2024-11-21T23:12:33.812531Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:33.812571Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2024-11-21T23:12:33.812996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.813055Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2024-11-21T23:12:33.813672Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T23:12:33.813764Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T23:12:33.813798Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T23:12:33.813833Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T23:12:33.813867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T23:12:33.813942Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:9121 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E37D8507-54E4-4A5D-BCF3-8E13B28BC071 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 43 2024-11-21T23:12:33.822907Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:9121 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 284293B3-4B29-42D2-A259-A823A1A34F12 amz-sdk-request: attempt=1 content-length: 602 content-md5: GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 602 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:9121 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 71655A78-9D3C-423F-95C8-B054ECF103AD amz-sdk-request: attempt=1 
content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2024-11-21T23:12:33.843866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 498 RawX2: 12884904347 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:33.843931Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2024-11-21T23:12:33.844086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 498 RawX2: 12884904347 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:33.844218Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 498 RawX2: 12884904347 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:33.844316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:33.844371Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.844418Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2024-11-21T23:12:33.844479Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T23:12:33.844650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:33.849726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.849905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:33.849951Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T23:12:33.850065Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T23:12:33.850100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:33.850143Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T23:12:33.850239Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T23:12:33.850293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:33.850330Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T23:12:33.850359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T23:12:33.850512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T23:12:33.852803Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T23:12:33.852888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T23:12:33.854821Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:12:33.854891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:524:2478] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::TooManyTx [GOOD] Test command err: Trying to start YDB, gRPC: 19563, MsgBus: 14158 2024-11-21T23:12:27.919115Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874408920696088:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:27.919150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011c5/r3tmp/tmpgLLfQO/pdisk_1.dat 2024-11-21T23:12:28.534296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:28.534387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:28.535181Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:28.539121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19563, node 1 2024-11-21T23:12:28.713916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:28.713937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:28.713944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:28.714060Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14158 TClient is connected to server localhost:14158 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:29.413945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.439722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.617900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.854320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.951671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:32.406156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430395534273:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.406271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.677714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.716346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.753831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.789193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.826103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.860710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.919370Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874408920696088:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:32.919447Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:32.931458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430395534768:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.931599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.932232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430395534773:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.977838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:32.989391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874430395534775:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:34.229121Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmJmNzFiNTktN2RjZGM3ZWYtODBhZTE2NjctNDJiNGRkM2U=, ActorId: [1:7439874438985469687:2463], ActorState: ReadyState, TraceId: 01jd8fzbx84c0nvz9f7n75jqg1, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:780: Too many transactions, current active: 2 MaxTxPerSession: 2 >> KqpSystemView::PartitionStatsFollower [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:30.401261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:30.401368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.401413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:30.401452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:30.401499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:30.401538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:30.401615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.401998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:30.491357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:30.491417Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.511284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:30.515370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:30.515526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:30.532283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:30.533802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:30.534592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.534944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:30.555212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.556591Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.556662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.556901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:30.556963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.557010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:30.557136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.566701Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:30.729369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:30.729720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.730039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:30.730370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:30.730533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.733611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.733745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:30.734031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.734169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:30.734244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:30.734306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:30.736222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.736300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:30.736347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:30.737988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T23:12:30.738031Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.738142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.738204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.742274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:30.750701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:30.750915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:30.751984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.752140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:30.752184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.752479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:30.752532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.752730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.752811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:30.759882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.759955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.760159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.760204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:30.760644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.760702Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:30.760828Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:30.760861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.760918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:30.760960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.761014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:30.761067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:30.761154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:30.761191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:30.761230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:30.768812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.768946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.768982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:30.769036Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:30.769083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.769197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
llectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.056018Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2024-11-21T23:12:34.058986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.059141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.059189Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:34.059296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:34.059430Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:34.061322Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2024-11-21T23:12:34.061436Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2024-11-21T23:12:34.062489Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:34.062603Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:34.062663Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2024-11-21T23:12:34.062776Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2024-11-21T23:12:34.062917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:32054 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CC727931-E64E-4813-BD4C-12595E827859 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 
2024-11-21T23:12:34.235624Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:34.235732Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:12:34.235993Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:34.236030Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:320542024-11-21T23:12:34.236217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.236292Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2024-11-21T23:12:34.237485Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T23:12:34.237585Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2024-11-21T23:12:34.237617Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2024-11-21T23:12:34.237651Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2024-11-21T23:12:34.237686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:12:34.237768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 595705BB-1694-4029-9620-594AE3D4B96E amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:32054 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F46C5ADB-781B-40EA-A00F-A40F119B0164 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2024-11-21T23:12:34.245042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:32054 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0C7E0573-C1D7-46F3-B23C-6C51DD42D7DB amz-sdk-request: 
attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2024-11-21T23:12:34.285136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:34.285200Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2024-11-21T23:12:34.285356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:34.285473Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 439 RawX2: 12884904298 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:34.285544Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:34.285598Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.285642Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:12:34.285691Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T23:12:34.285855Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:34.292665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.292966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.293005Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T23:12:34.293102Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T23:12:34.293124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:34.293165Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2024-11-21T23:12:34.293226Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T23:12:34.293258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:12:34.293281Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T23:12:34.293304Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T23:12:34.293387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:12:34.295088Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T23:12:34.295179Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T23:12:34.296978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:12:34.297041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:469:2433] TestWaitNotification: OK eventTxId 103 >> TxUsage::WriteToTopic_Demo_32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx2 [GOOD] Test command err: Trying to start YDB, gRPC: 4067, MsgBus: 4847 2024-11-21T23:12:27.939883Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874408609677299:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:27.939934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011bc/r3tmp/tmp3Ua84G/pdisk_1.dat 2024-11-21T23:12:28.405685Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:28.408414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:28.408525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:28.413027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4067, node 1 2024-11-21T23:12:28.629325Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:28.629349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:28.629358Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:28.629483Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4847 TClient is connected to server localhost:4847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:29.271098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.291455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:29.312889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.483017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.716934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.799093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:32.385003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430084515271:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.405263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.438982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.483701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.521005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.546588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.578167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.623998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.719436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430084515767:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.719503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.719867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430084515772:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.723445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:32.741662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874430084515774:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:32.938504Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874408609677299:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:32.938563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:34.316911Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzU5NTU1OGQtMTlkZWI2ODMtYTE0ZGUyNzEtNzFkMzE0ZWE=, ActorId: [1:7439874434379483387:2461], ActorState: ReadyState, TraceId: 01jd8fzc0366q358k66fys96va, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx [GOOD] Test command err: Trying to start YDB, gRPC: 11127, MsgBus: 4338 2024-11-21T23:12:27.927338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874408892700486:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:27.927387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001209/r3tmp/tmpjbyuWb/pdisk_1.dat 2024-11-21T23:12:28.330609Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:28.340127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:28.340248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:28.343886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11127, node 1 2024-11-21T23:12:28.492287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:28.492309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:28.492316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:28.492456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4338 TClient is connected to server localhost:4338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:29.191574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.229191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.441345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.701044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.811113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:32.269522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430367538462:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.283847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.524759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.559837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.654749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.735047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.772165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.848576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.927596Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874408892700486:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:32.927698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:32.934628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430367538970:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.934700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.934750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430367538975:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.939168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:32.968497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874430367538977:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:34.578361Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTcwMTAyNzItM2I1ODk1ZDctNDAxMjg1NzMtMTFhMGRkYTU=, ActorId: [1:7439874438957473885:2462], ActorState: ReadyState, TraceId: 01jd8fzc8743jjnmf85s1zbm6r, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:29.747383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:29.747546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.747601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:29.747660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:29.747735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:29.747783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:29.747864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.748271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:29.827637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:29.827709Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:29.837771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:29.841432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:29.841627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:29.862091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:29.872000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:29.872777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:29.873187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:29.877919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.878991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:29.879043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.879218Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:29.879268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:29.879310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:29.879381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.887652Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:30.052680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:30.053029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.053347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:30.053716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:30.053823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.058990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.059181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:30.059539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.059663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:30.059738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:30.059816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:30.063459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.063541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:30.063580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:30.065373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.065437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.065485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.065556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.069352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:30.076548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:30.076803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:30.077942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.078148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:30.078203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.078535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:30.078600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.078797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.078909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:30.082253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.082345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.082561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.082606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:30.083018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.083100Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:30.083212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:30.083260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.083313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:30.083354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.083393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:30.083443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:30.083514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:30.083551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:30.083584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:30.085511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.085643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.085677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:30.085712Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:30.085750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.085849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
3 Issue: '' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2024-11-21T23:12:34.627281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.627333Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.627479Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2024-11-21T23:12:34.627615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:34.628226Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.628306Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.628342Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T23:12:34.628374Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T23:12:34.628409Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T23:12:34.628761Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.628822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.628847Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T23:12:34.628875Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T23:12:34.628913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:12:34.629010Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T23:12:34.632598Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T23:12:34.632961Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T23:12:34.633020Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T23:12:34.633060Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction 
is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T23:12:34.633840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T23:12:34.633970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2024-11-21T23:12:34.634541Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:34.634667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:34.634748Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2024-11-21T23:12:34.634900Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T23:12:34.634966Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T23:12:34.635005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T23:12:34.635091Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:12:34.635174Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:12:34.635242Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T23:12:34.635309Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T23:12:34.635348Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T23:12:34.635382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T23:12:34.635445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:12:34.635511Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T23:12:34.635562Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T23:12:34.635596Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T23:12:34.636336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.636449Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 281474976710763 2024-11-21T23:12:34.638378Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:34.638461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:34.638629Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:12:34.638790Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:34.638829Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T23:12:34.638866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T23:12:34.639870Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.639988Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.640037Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T23:12:34.640080Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T23:12:34.640129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T23:12:34.640761Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.640841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.640869Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T23:12:34.640900Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T23:12:34.640929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:12:34.641017Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T23:12:34.641067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:123:2149] 2024-11-21T23:12:34.645222Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.645375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:34.645453Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T23:12:34.645524Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T23:12:34.645572Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T23:12:34.645603Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2024-11-21T23:12:34.645634Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2024-11-21T23:12:34.647533Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T23:12:34.647638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:12:34.647705Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:823:2760] TestWaitNotification: OK eventTxId 103 >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TxUsage::WriteToTopic_Demo_33 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 17572, MsgBus: 61777 2024-11-21T23:12:20.587927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874379434497646:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:20.596771Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fdc/r3tmp/tmpPPn7Vx/pdisk_1.dat 2024-11-21T23:12:21.032994Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:21.053990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:21.054104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:21.055671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17572, node 1 2024-11-21T23:12:21.167017Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:21.167045Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:21.167053Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:21.167150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61777 TClient is connected to server localhost:61777 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:21.695828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:21.710077Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:22.054196Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:12:22.054240Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:12:22.054309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7439874379434497899:2192], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:12:22.054330Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:12:23.057174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:12:23.057219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:12:23.057290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7439874379434497899:2192], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:12:23.057301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:12:23.619112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874392319400013:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:23.619224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:23.801789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439874392319400038:2303], Recipient [1:7439874379434497899:2192]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:12:23.801830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:12:23.801844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T23:12:23.801882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439874392319400034:2300], Recipient [1:7439874379434497899:2192]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2024-11-21T23:12:23.801896Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:12:23.880674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:12:23.881138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:12:23.881301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2024-11-21T23:12:23.881803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T23:12:23.881840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T23:12:23.881877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T23:12:23.881995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T23:12:23.882011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:12:23.882758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:12:23.882781Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:12:23.882877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2024-11-21T23:12:23.882901Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1 2024-11-21T23:12:23.882908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:12:23.882938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T23:12:23.883428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T23:12:23.883558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Followers 2024-11-21T23:12:23.883576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T23:12:23.883588Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976710658:0 2024-11-21T23:12:23.883797Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:7439874379434497899:2192], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T23:12:23.883815Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T23:12:23.883849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:12:23.883865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:12:23.883992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T23:12:23.884080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:12:23.884098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439874383729465281:2240], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2024-11-21T23:12:23.884109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439874383729465281:2240], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2024-11-21T23:12:23.884145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:12:23.884177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-1 ... 
at datashard 72075186224037888 FollowerId 2, TableInfos size = 1 2024-11-21T23:12:33.983632Z node 1 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 2, tableId 2 2024-11-21T23:12:33.983735Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7439874392319400110:2310]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T23:12:33.983753Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2024-11-21T23:12:33.983770Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T23:12:33.983908Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7439874392319400111:2311]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2024-11-21T23:12:33.983922Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2024-11-21T23:12:33.983938Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T23:12:33.984416Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439874435269073375:2518], Recipient [1:7439874379434497899:2192]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:12:33.984457Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:12:33.984478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T23:12:33.984659Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877760, Sender [1:7439874435269073374:2384], Recipient [1:7439874392319400109:2309]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T23:12:33.984681Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T23:12:33.984952Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7439874392319400109:2309], Recipient [1:7439874379434497899:2192]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { } ShardState: 3 NodeId: 1 StartTime: 1732230743961 TableOwnerId: 72057594046644480 FollowerId: 2 2024-11-21T23:12:33.984969Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2024-11-21T23:12:33.984998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 2 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Readonly' dataSize 0 rowCount 0 cpuUsage 0 2024-11-21T23:12:33.985056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 2 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2024-11-21T23:12:34.062607Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 
2146435088, Sender [0:0:0], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T23:12:34.062664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T23:12:34.062689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2024-11-21T23:12:34.062750Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2024-11-21T23:12:34.062766Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2024-11-21T23:12:34.062833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 768 row count 4 2024-11-21T23:12:34.062897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2024-11-21T23:12:34.062917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 2: RowCount 4, DataSize 768 2024-11-21T23:12:34.062931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2024-11-21T23:12:34.062993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2024-11-21T23:12:34.063045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 0 row count 0 2024-11-21T23:12:34.063063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=2, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2024-11-21T23:12:34.063074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=2, pathId 2: RowCount 0, DataSize 0 2024-11-21T23:12:34.063085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 2 2024-11-21T23:12:34.063141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2024-11-21T23:12:34.063235Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T23:12:34.063251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T23:12:34.063261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2024-11-21T23:12:34.063433Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 768 RowCount: 4 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1732230743893 AccessTime: 1732230744405 UpdateTime: 1732230744257 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 2024-11-21T23:12:34.063506Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 2 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1732230743961 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 2 2024-11-21T23:12:34.088205Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:12:34.088246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:12:34.088305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7439874379434497899:2192], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:12:34.088320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:12:35.090785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:12:35.090830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:12:35.090887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7439874379434497899:2192], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:12:35.090905Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
SELECT from partition_stats, attempt 2 2024-11-21T23:12:35.810922Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439874443859007993:2396], owner: [1:7439874443859007990:2394], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:12:35.832446Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439874443859007993:2396], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:12:35.834779Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274595843, Sender [1:7439874443859007993:2396], Recipient [1:7439874379434497899:2192]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2024-11-21T23:12:35.834809Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2024-11-21T23:12:35.837438Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439874443859007993:2396], row count: 2, finished: 1 2024-11-21T23:12:35.837485Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439874443859007993:2396], owner: [1:7439874443859007990:2394], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:12:35.849238Z node 1 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [1:7439874379434497500:2056], database# /Root, query hash# 14960494650040056739, cpu time# 378861 2024-11-21T23:12:36.002592Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7439874379434497468:2068], interval end# 2024-11-21T23:12:36.000000Z, event interval end# 2024-11-21T23:12:36.000000Z 2024-11-21T23:12:36.002639Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7439874379434497468:2068], query logs count# 0, processor ids count# 0, processor id to database count# 0 2024-11-21T23:12:36.006167Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7439874379434497500:2056], interval end# 2024-11-21T23:12:36.000000Z, event interval end# 2024-11-21T23:12:36.000000Z 2024-11-21T23:12:36.006225Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7439874379434497500:2056], query logs count# 1, processor ids count# 1, processor id to database count# 0 2024-11-21T23:12:36.030654Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435089, Sender [0:0:0], Recipient [1:7439874379434497899:2192]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2024-11-21T23:12:36.030718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:12:36.030756Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> KqpTx::SnapshotRO >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_41 [GOOD] Test command err: 2024-11-21T23:10:29.945228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873904895224113:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:29.945467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:30.300339Z node 1 :PQ_READ_PROXY 
DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bf2/r3tmp/tmpQ9Nkaf/pdisk_1.dat 2024-11-21T23:10:30.637173Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:30.667862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:30.667957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:30.669725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28635, node 1 2024-11-21T23:10:30.750634Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002bf2/r3tmp/yandexfhXeAF.tmp 2024-11-21T23:10:30.750672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002bf2/r3tmp/yandexfhXeAF.tmp 2024-11-21T23:10:30.750859Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002bf2/r3tmp/yandexfhXeAF.tmp 2024-11-21T23:10:30.750981Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:30.816444Z INFO: TTestServer started on Port 11732 GrpcPort 28635 TClient is connected to server localhost:11732 PQClient connected to localhost:28635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:31.212468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:31.238037Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:31.254263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:10:31.422539Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:10:33.665176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873922075093946:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.665585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873922075093959:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.665646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.669538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:33.703006Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T23:10:33.706637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873922075093961:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:34.190801Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873926370061332:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:34.193392Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGE4MTU0MGQtNzYzOGZhNGQtMzM0YWMyYzItOTYwMTE5MzM=, ActorId: [1:7439873922075093929:2307], ActorState: ExecuteState, TraceId: 01jd8fvp5m5vvpmagd2nrjc2gb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:34.195278Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:34.198597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:34.237596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:34.348355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873926370061604:2621] 2024-11-21T23:10:34.894769Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873904895224113:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:34.894839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:40.829593Z :WriteToTopic_Demo_4 INFO: TTopicSdkTestSetup started 2024-11-21T23:10:40.846061Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:40.894615Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:40.895036Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:40.895710Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:40.895952Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:40.895994Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:40.896013Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:40.896033Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:40.896077Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:40.896117Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:40.896148Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:40.912700Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873952139865707:2813] connected; active server actors: 1 2024-11-21T23:10:40.912970Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T23:10:40.913412Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:40.913474Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873952139865706:2812], now have 1 active actors on pipe 2024-11-21T23:10:40.914748Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:40.914909Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:40.927584Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:40.927616Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:40.946311Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439873909190191684 RawX2: 4294969489 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "test-topic" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/test-topic" ... 
essage_group_id' SeqNo: 85 partNo : 1 messageNo: 169 size 488129 offset: -1 2024-11-21T23:12:35.844125Z node 9 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|413e3204-a31dcd22-500d716c-5a7aefa4_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T23:12:35.854012Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|413e3204-a31dcd22-500d716c-5a7aefa4_0 grpc closed 2024-11-21T23:12:35.854057Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|413e3204-a31dcd22-500d716c-5a7aefa4_0 is DEAD 2024-11-21T23:12:35.863711Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T23:12:35.863810Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:12:35.863849Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:12:35.864588Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 2000234 2024-11-21T23:12:35.864657Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.864718Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715674}, 100000}, SeqNo: 82, partNo: 0, Offset: 81 is stored on disk 2024-11-21T23:12:35.864758Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.864794Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715674}, 100000}, SeqNo: 82, partNo: 1, Offset: 81 is stored on disk 2024-11-21T23:12:35.864816Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.864857Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715674}, 100000}, SeqNo: 83, partNo: 0, Offset: 82 is stored on disk 2024-11-21T23:12:35.864882Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.864933Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715674}, 100000}, SeqNo: 83, partNo: 1, Offset: 82 is stored on disk 2024-11-21T23:12:35.866716Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 164 requestId: cookie: 82 2024-11-21T23:12:35.866836Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 165 requestId: cookie: 83 2024-11-21T23:12:35.866876Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T23:12:35.866906Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T23:12:35.866991Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Reserve bytes in transaction. Partition: 0, WriteId: { NodeId: 9 KeyId: 281474976715674 }, NeedSupportivePartition: 0 2024-11-21T23:12:35.867029Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:12:35.867066Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439874433257288167:2475] destroyed 2024-11-21T23:12:35.867082Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:12:35.867100Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439874433257288163:2475] destroyed 2024-11-21T23:12:35.867142Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:12:35.881284Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715674}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 84 partNo 0 2024-11-21T23:12:35.881360Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715674}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 84 partNo 1 2024-11-21T23:12:35.890474Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715674}, 100000} part blob complete sourceId '\0test-message_group_id' seqNo 84 partNo 1 FormedBlobsCount 0 NewHead: Offset 83 PartNo 0 PackedSize 1000253 count 1 nextOffset 84 batches 2 2024-11-21T23:12:35.890603Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715674}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 85 partNo 0 2024-11-21T23:12:35.890654Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715674}, 100000} part blob processing sourceId '\0test-message_group_id' seqNo 85 partNo 1 2024-11-21T23:12:35.905832Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976715674}, 100000} part blob complete sourceId '\0test-message_group_id' seqNo 85 partNo 1 FormedBlobsCount 0 NewHead: Offset 83 PartNo 0 PackedSize 2000467 count 2 nextOffset 85 batches 3 
2024-11-21T23:12:35.906670Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {9, 281474976715674}, 100000} compactOffset 77,8 HeadOffset 77 endOffset 83 curOffset 85 D0000100000_00000000000000000077_00000_0000000008_00008 size 8001828 WTime 1732230755906 2024-11-21T23:12:35.947464Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:12:35.947630Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 100000 offset 77 partNo 0 count 8 size 8001828 2024-11-21T23:12:35.947956Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:12:35.948005Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 170 requestId: cookie: 86 2024-11-21T23:12:35.956278Z node 9 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 100000 offset 77 count 8 size 8001828 actorID [9:7439874433257288059:2454] 2024-11-21T23:12:35.956427Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 2000234 2024-11-21T23:12:35.956488Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.956556Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715674}, 100000}, SeqNo: 84, partNo: 0, Offset: 83 is stored on disk 2024-11-21T23:12:35.956602Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.956649Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715674}, 100000}, SeqNo: 84, partNo: 1, Offset: 83 is stored on disk 2024-11-21T23:12:35.956673Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.956720Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715674}, 100000}, SeqNo: 85, partNo: 0, Offset: 84 is stored on disk 2024-11-21T23:12:35.956750Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.956797Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976715674}, 100000}, SeqNo: 85, partNo: 1, Offset: 84 is stored on disk 2024-11-21T23:12:35.957675Z node 9 :PERSQUEUE DEBUG: PQ Cache (L2). Evicting blob. 
Tablet '72075186224037894' partition 100000 offset 47 size 8001828 2024-11-21T23:12:35.957697Z node 9 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037894' partition 100000 offset 77 size 8001828 2024-11-21T23:12:35.963419Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 168 requestId: cookie: 84 2024-11-21T23:12:35.963520Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 169 requestId: cookie: 85 2024-11-21T23:12:35.963619Z node 9 :PERSQUEUE DEBUG: Erasing blob in L1. Partition 100000 offset 47 size 8001828 cause it's been evicted from L2. Actual L1 size: 4 2024-11-21T23:12:35.963638Z node 9 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic topic_A, tablet id72075186224037894, cookie 0 2024-11-21T23:12:35.981994Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976715674 LockNode: 9 Status: STATUS_NOT_FOUND 2024-11-21T23:12:35.982058Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] delete partitions for WriteId {9, 281474976715674} 2024-11-21T23:12:35.983128Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.984967Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976715674}, 100000}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2024-11-21T23:12:35.989543Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {9, 281474976715674}, 100000} 2024-11-21T23:12:35.989666Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {9, 281474976715674} 2024-11-21T23:12:35.989737Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T23:12:35.991887Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit [GOOD] >> KqpLocks::Invalidate >> KqpSinkLocks::EmptyRangeAlreadyBroken >> KqpSinkTx::DeferredEffects >> KqpSinkLocks::UncommittedRead >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> TStorageTenantTest::Empty [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:30.762578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:30.762687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.762738Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:30.762773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:30.762834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:30.762882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:30.762955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.763252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:30.860234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:30.860287Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.879291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:30.883752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:30.883928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:30.891295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:30.891941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:30.892550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.892848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:30.896872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.898229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.898289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.898560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:30.898613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.898651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:30.898767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.905404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:31.028779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:31.029056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.029293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:31.029599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:31.029661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.035678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:31.035897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:31.036127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.036215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:31.036264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:31.036304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:31.038912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.038973Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:31.039010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:31.040785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.040835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.040902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:31.040989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.044710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:31.046483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:31.046646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:31.047392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:31.047498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:31.047536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:31.047749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:31.047787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:31.047910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:31.047966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:31.049377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:31.049410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:31.049527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:31.049556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:31.049841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.049886Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:31.049979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:31.050008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.050042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:31.050072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.050096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:31.050118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:31.050158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:31.050185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:31.050212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:31.051652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:31.051765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:31.051800Z node 
1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:31.051835Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:31.051871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:31.051981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... lt, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 281474976710765:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T23:12:37.227375Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 3 -> 128 2024-11-21T23:12:37.231494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T23:12:37.231708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T23:12:37.231773Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:37.231860Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710765 ready parts: 1/1 2024-11-21T23:12:37.232025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976710765 MinStep: 5000010 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:37.237716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2024-11-21T23:12:37.237886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 281474976710765 at step: 5000010 2024-11-21T23:12:37.238258Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:37.238375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 12884904041 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:37.238472Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 HandleReply TEvOperationPlan, stepId: 5000010, at schemeshard: 72057594046678944 2024-11-21T23:12:37.238647Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 128 -> 129 2024-11-21T23:12:37.238803Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 REQUEST: PUT /Backup2/metadata.json HTTP/1.1 
HEADERS: Host: localhost:14358 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A38D3E11-54B7-4ABB-9848-7150F9ACCBAE amz-sdk-request: attempt=1 content-length: 73 content-md5: 5UnTthDw7DG9u0TfCJZu+w== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000010 2024-11-21T23:12:37.296950Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:37.297015Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T23:12:37.297297Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:37.297336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:201:2204], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 7 2024-11-21T23:12:37.297849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T23:12:37.297938Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710765 2024-11-21T23:12:37.298867Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2024-11-21T23:12:37.298947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2024-11-21T23:12:37.298979Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2024-11-21T23:12:37.299011Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2024-11-21T23:12:37.299071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2024-11-21T23:12:37.299152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true REQUEST: PUT /Backup2/permissions.pb HTTP/1.1 HEADERS: Host: localhost:14358 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C00F1D8E-E834-49A6-A5B2-D8F4CC7B421E amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/permissions.pb / / 43 2024-11-21T23:12:37.305422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: PUT /Backup2/scheme.pb HTTP/1.1 HEADERS: Host: localhost:14358 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA 
amz-sdk-invocation-id: 2B9F915D-EF7B-4945-AF0F-4132770BCD8C amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/scheme.pb / / 355 REQUEST: PUT /Backup2/data_00.csv HTTP/1.1 HEADERS: Host: localhost:14358 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B6171F10-75FA-43E5-B976-53B311B3C9F5 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/data_00.csv / / 0 2024-11-21T23:12:37.373961Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 793 RawX2: 12884904623 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:37.374049Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2024-11-21T23:12:37.374201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 793 RawX2: 12884904623 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:37.374332Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 793 RawX2: 12884904623 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2024-11-21T23:12:37.374423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:37.374488Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T23:12:37.374542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T23:12:37.374598Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 129 -> 240 2024-11-21T23:12:37.374789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:37.383518Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2024-11-21T23:12:37.383952Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 
72057594046678944 2024-11-21T23:12:37.384006Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2024-11-21T23:12:37.384136Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2024-11-21T23:12:37.384167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2024-11-21T23:12:37.384208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: true 2024-11-21T23:12:37.384289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710765 2024-11-21T23:12:37.384336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2024-11-21T23:12:37.384370Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2024-11-21T23:12:37.384406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710765:0 2024-11-21T23:12:37.384541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T23:12:37.387438Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2024-11-21T23:12:37.387521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710765 2024-11-21T23:12:37.389542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:12:37.389607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:824:2759] TestWaitNotification: OK eventTxId 104 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> YdbOlapStore::LogTsRangeDescending [GOOD] >> TExportToS3Tests::AuditCancelledExport [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> TxUsage::WriteToTopic_Demo_24 >> LocalPartition::DescribeHang [GOOD] >> LocalPartition::DiscoveryHang >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:12:30.870937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2024-11-21T23:12:30.871033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.871076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:30.871124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:30.871163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:30.871190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:30.871242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.871590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:30.943182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:30.943232Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.953654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:30.953763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:30.953931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:30.965691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:30.965919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:30.966510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.966735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:30.969467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.970683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.970735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.971008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:30.971053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.971089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:30.971167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.977361Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:31.109239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2024-11-21T23:12:31.109495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.109708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:31.109967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:31.110040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.113232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:31.113505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:31.113863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.114034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:31.114100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:31.114139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:31.116333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.116414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:31.116458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:31.118474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.118528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.118600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:31.118702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.132491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:31.134437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:31.134705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:31.136201Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:31.136409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:31.136477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:31.136788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:31.136869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:31.137090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:31.137214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:31.139758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:31.139842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:31.140048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:31.140096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:31.140464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.140507Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:31.140641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:31.140699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.140741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:31.140774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.140808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:31.140836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:31.140893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:31.140991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:31.141018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:31.142631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:31.142730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:31.142765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:31.142796Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:31.142828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:31.142904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... opose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:7118" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } 
BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:40.843635Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:40.843847Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:12:40.844288Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:40.844356Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:40.845918Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:12:40.845985Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:12:40.865001Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:40.865441Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2024-11-21T23:12:40.865895Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2024-11-21T23:12:40.865996Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 
2024-11-21T23:12:40.866546Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:40.866629Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet72057594046678944 2024-11-21T23:12:40.866697Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2024-11-21T23:12:40.866744Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2024-11-21T23:12:40.879891Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2024-11-21T23:12:40.880008Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2024-11-21T23:12:40.880707Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:40.880769Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:40.880974Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T23:12:40.894815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2024-11-21T23:12:40.895336Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:40.895391Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:12:40.895586Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T23:12:40.896521Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2024-11-21T23:12:40.896671Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2024-11-21T23:12:40.897427Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T23:12:40.897615Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2024-11-21T23:12:40.910124Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T23:12:40.910517Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:12:40.910578Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:553:2515] TestWaitNotification: OK eventTxId 102 AUDIT LOG buffer(7): 2024-11-21T23:12:39.488141Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:12:39.605795Z: component=schemeshard, tx_id=101, 
remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:12:40.298026Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT START, status=SUCCESS, detailed_status=SUCCESS, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none} 2024-11-21T23:12:40.308053Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE DIRECTORY, paths=[/MyRoot/export-102], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:12:40.349114Z: component=schemeshard, tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=CREATE TABLE COPY FROM, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:12:40.865328Z: component=schemeshard, tx_id=281474976710759, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=BACKUP TABLE, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:12:40.896989Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2024-11-21T23:12:39.548624Z, end_time=2024-11-21T23:13:09.596624Z AUDIT LOG checked line: 2024-11-21T23:12:40.896989Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2024-11-21T23:12:39.548624Z, end_time=2024-11-21T23:13:09.596624Z >> KqpTx::RollbackManyTx [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> KqpTx::CommitRoTx [GOOD] |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |83.7%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackManyTx [GOOD] Test command err: Trying to start YDB, gRPC: 3757, MsgBus: 29506 2024-11-21T23:12:29.208545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874419165278660:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:29.209144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00119b/r3tmp/tmpyvLgkt/pdisk_1.dat 2024-11-21T23:12:29.747141Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3757, node 1 2024-11-21T23:12:29.756469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:29.756584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:29.759569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:29.846554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:29.846588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:29.846599Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:29.846681Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29506 TClient is connected to server localhost:29506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:30.699739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:30.716192Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:30.732220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:30.889001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:31.049295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:31.117079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:33.189869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874436345149407:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.189987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.442483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.486814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.551715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.620624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.666816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.705885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.809100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874436345149911:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.809188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.809311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874436345149916:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.812260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:33.825072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874436345149918:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:34.190633Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874419165278660:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:34.190699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 17266, MsgBus: 25211 2024-11-21T23:12:29.150333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874417797232654:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:29.150378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011b4/r3tmp/tmpVgxrSL/pdisk_1.dat 2024-11-21T23:12:29.608987Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:29.613257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:29.616618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:29.619377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17266, node 1 2024-11-21T23:12:29.794879Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:29.794905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:29.794930Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:29.795025Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25211 TClient is connected to server localhost:25211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:30.522157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:30.610849Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:30.622605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:30.769775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:30.931034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:30.997417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:32.894742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874430682136021:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.894859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.177004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.218132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.295215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.358848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.431034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.489256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.592388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874434977103826:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.592485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.592826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874434977103831:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.601049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:33.617127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874434977103833:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:34.155626Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874417797232654:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:34.155717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1656, MsgBus: 16927 2024-11-21T23:12:36.213184Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874446897443918:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011b4/r3tmp/tmpWVWVmR/pdisk_1.dat 2024-11-21T23:12:36.278076Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:36.410930Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:36.463411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:36.463509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:36.468281Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1656, node 2 2024-11-21T23:12:36.665902Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:36.665951Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:36.665960Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:36.666080Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16927 TClient is connected to server localhost:16927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:37.389924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:37.427103Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:37.455324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:37.589045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:37.808002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:38.046111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:41.203052Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874446897443918:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:41.203154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:42.091783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874472667249230:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.091869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.300417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.405764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.495777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.556259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.617067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.748496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.822812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874472667249745:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.822914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.823181Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874472667249750:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.834122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:42.850918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874472667249752:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> HttpRequest::Status [GOOD] >> KqpLocks::MixedTxFail [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:12:29.867097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:29.867190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.867238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:29.867280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:29.867322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:29.867346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:29.867392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.867754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:29.941729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:29.941793Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:29.956662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:29.956778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:29.956950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:29.971964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:29.972257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:29.972844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:29.973068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:29.976141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.977503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:29.977575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.977841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:29.977890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a 
bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:29.977937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:29.978023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.984891Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:30.125898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:30.126211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.126655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:30.127005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:30.127085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.130077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.130313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:30.130650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.130744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:30.130788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:30.130826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:30.133155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.133215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:30.133255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:30.141317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.141420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.141488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.141558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2024-11-21T23:12:30.145931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:30.148353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:30.148577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:30.149769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.149977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:30.150040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.150352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:30.150448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.150647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.150774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:30.154039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.154095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.154349Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.154423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:30.154865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.154916Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:30.155023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:30.155069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.155119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:30.155163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T23:12:30.155207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:30.155248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:30.155335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:30.155411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:30.155455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:30.165332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.165504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.165550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:30.165595Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:30.165677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.165840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... d manually' Items: 2 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710763 2024-11-21T23:12:46.008653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T23:12:46.008691Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T23:12:46.008746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2024-11-21T23:12:46.008869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:46.009714Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.009795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.009835Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T23:12:46.009869Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T23:12:46.009902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T23:12:46.010997Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.011098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.011127Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T23:12:46.011156Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2024-11-21T23:12:46.011184Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:12:46.011246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T23:12:46.012940Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T23:12:46.013715Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T23:12:46.013756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2024-11-21T23:12:46.013791Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2024-11-21T23:12:46.014657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2024-11-21T23:12:46.014858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:12:46.015093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2024-11-21T23:12:46.015468Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:46.015575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 12884904042 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:46.015631Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2024-11-21T23:12:46.015800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2024-11-21T23:12:46.015871Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2024-11-21T23:12:46.015904Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T23:12:46.015959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:12:46.016012Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:12:46.016078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2024-11-21T23:12:46.016125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2024-11-21T23:12:46.016162Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2024-11-21T23:12:46.016197Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2024-11-21T23:12:46.016257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:12:46.016301Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2024-11-21T23:12:46.016345Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T23:12:46.016394Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2024-11-21T23:12:46.017532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.023815Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:46.023893Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:46.024099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2024-11-21T23:12:46.024227Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:46.024264Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2024-11-21T23:12:46.024318Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:202:2205], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2024-11-21T23:12:46.025116Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.025276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.025334Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 
2024-11-21T23:12:46.025387Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T23:12:46.025450Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T23:12:46.025993Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.026083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.026118Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2024-11-21T23:12:46.026145Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T23:12:46.026172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:12:46.026254Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2024-11-21T23:12:46.026293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:124:2150] 2024-11-21T23:12:46.042310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.047572Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2024-11-21T23:12:46.047740Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2024-11-21T23:12:46.047803Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2024-11-21T23:12:46.047866Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T23:12:46.047894Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2024-11-21T23:12:46.047928Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2024-11-21T23:12:46.055282Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T23:12:46.055420Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:12:46.055505Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:753:2693] TestWaitNotification: OK eventTxId 103 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> TxUsage::WriteToTopic_Demo_43 [GOOD] >> KqpLocks::Invalidate [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 13609, MsgBus: 28145 
2024-11-21T23:12:29.177018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874418303014305:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:29.177626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011a4/r3tmp/tmpY46Mym/pdisk_1.dat 2024-11-21T23:12:29.642643Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:29.651865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:29.651938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:29.654853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13609, node 1 2024-11-21T23:12:29.854902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:29.854934Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:29.854948Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:29.855030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28145 TClient is connected to server localhost:28145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:30.583714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:30.596435Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:30.610383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:30.866544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:31.049922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:31.154335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:33.402661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874435482884976:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.417007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.500756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.553352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.586652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.629460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.673924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.756052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.819773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874435482885475:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.819869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.820133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874435482885480:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.824650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:33.835504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874435482885482:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:34.194920Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874418303014305:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:34.194984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:35.759207Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmQyYTlhODEtMjA0MGUxODItZjg0NWE0MjEtOThhOTUwMzY=, ActorId: [1:7439874444072820444:2459], ActorState: ExecuteState, TraceId: 01jd8fzdc11mz924trzjrnwv3j, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken Trying to start YDB, gRPC: 8322, MsgBus: 29852 2024-11-21T23:12:36.771611Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874447376831150:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:36.771659Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011a4/r3tmp/tmpnSZ5iU/pdisk_1.dat 2024-11-21T23:12:37.014054Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:37.043663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:37.043768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:37.049639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8322, node 2 2024-11-21T23:12:37.210959Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:37.210986Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:37.210994Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:37.211091Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29852 TClient is connected to server localhost:29852 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:38.072674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:38.082822Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:38.109623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:38.235154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:38.559680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:38.743242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:41.774575Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874447376831150:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:41.774645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:42.138688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874473146636621:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.138800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.263153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.326169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.374939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.420601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.482968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.571233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:42.673950Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874473146637123:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.674023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.674542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874473146637128:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.678821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:42.703965Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:12:42.704250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874473146637130:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:45.689877Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTJjZGIwOGYtZDZiM2U1YTAtOTExZTBhNTQtMzI2ZjM3ZDk=, ActorId: [2:7439874481736572065:2466], ActorState: ExecuteState, TraceId: 01jd8fzpxh897k0jzy242j44kt, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail [GOOD] Test command err: Trying to start YDB, gRPC: 5985, MsgBus: 20881 2024-11-21T23:12:27.912611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874411735886365:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:27.912693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011b5/r3tmp/tmpOkWjdN/pdisk_1.dat 2024-11-21T23:12:28.331296Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:28.339149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:28.339257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:28.343152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5985, node 1 2024-11-21T23:12:28.426997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:28.427034Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:28.427043Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:28.427146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20881 TClient is connected to server localhost:20881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:29.099526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:29.112332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:29.125513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.398422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.595720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.697120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:31.536278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874428915757245:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:31.536392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:31.889790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:31.927788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:31.981752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.030461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.068166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.161611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.250593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874433210725044:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.250690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.250913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874433210725049:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.255081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:32.265796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874433210725051:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:32.918460Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874411735886365:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:32.918538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:34.067247Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmY1YzQ0YmMtNmYxMGM0MjgtYTY1ZDZhMWQtNzljNDM0Mjk=, ActorId: [1:7439874437505692661:2461], ActorState: ExecuteState, TraceId: 01jd8fzbmtem0bxvbxtp1r3ayp, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 27814, MsgBus: 64773 2024-11-21T23:12:35.806856Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874444657130208:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011b5/r3tmp/tmp8H8aWq/pdisk_1.dat 2024-11-21T23:12:35.934549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:36.069093Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:36.091854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:36.091935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:36.096582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27814, node 2 2024-11-21T23:12:36.306975Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:36.307005Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:36.307014Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:36.307134Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64773 TClient is connected to server localhost:64773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:36.958621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:36.967578Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:12:39.921192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874461836999820:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:39.921265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:39.923038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874461836999856:230 ... oreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:12:43.927857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:12:43.927885Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:12:43.928062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:12:43.928094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:12:43.928742Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:12:43.928778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:12:43.928872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:12:43.928901Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:12:43.929080Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:12:43.929109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:12:43.929197Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:12:43.929231Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:12:43.929294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:12:43.929323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:12:43.929373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:12:43.929401Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:12:43.929765Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:12:43.929811Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:12:43.929997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:12:43.930027Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:12:43.930176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:12:43.930207Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:12:43.934029Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:12:43.934124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:12:43.934303Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:12:43.934333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:12:44.221225Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:12:44.221306Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:12:44.221424Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:12:44.221457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:12:44.221657Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:12:44.221687Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:12:44.221776Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:12:44.221808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:12:44.221866Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:12:44.221893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:12:44.221935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:12:44.221970Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:12:44.222292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:12:44.222328Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:12:44.223024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:12:44.223074Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:12:44.223273Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:12:44.223305Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:12:44.223515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:12:44.223546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:12:44.223670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2024-11-21T23:12:44.223696Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:12:46.299283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;local_tx_no=4;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037936;tx_state=complete;fline=interaction.h:353;batch=Col1: [ 2 ] Col2: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976715665}],"finishes":[{"inc":{"count_include":1},"id":281474976715665}]},"p":{"include":0,"pk":"2;1;"}}]}; 2024-11-21T23:12:46.320232Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjI0MzRhNDAtYTljMjIzN2YtZjRkOGEwYjEtZmE5ZDA4, ActorId: [2:7439874487606806438:2671], ActorState: ExecuteState, TraceId: 01jd8fzqk29s73544r39texyvc, Create QueryResponse for error on request, msg: 2024-11-21T23:12:46.324468Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037936;tx_state=complete;fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715670; 2024-11-21T23:12:46.325308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[2:7439874466131967688:2326];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:111;event=abort;tx_id=281474976715665;problem=finished; 2024-11-21T23:12:46.325381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037936;self_id=[2:7439874466131967688:2326];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:128;event=abort;tx_id=281474976715665;problem=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogTsRangeDescending [GOOD] Test command err: 2024-11-21T23:07:37.168459Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873162435759712:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:37.168512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001dc6/r3tmp/tmpfpSZSV/pdisk_1.dat 2024-11-21T23:07:38.026831Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:38.056699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:38.056797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:38.072631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17550, node 1 2024-11-21T23:07:38.257021Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:07:38.257044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:07:38.257055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:07:38.257159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14610 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:38.619891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.630141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:38.630199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:07:38.643250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:07:38.643464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:07:38.643483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:07:38.646209Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:07:38.646237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:07:38.648006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:07:38.648331Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:07:38.658335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230458700, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:07:38.658387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:07:38.658693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:07:38.661036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:38.661210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:38.661256Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:07:38.661353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 
2024-11-21T23:07:38.661408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:07:38.661460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:07:38.664859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:07:38.664909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:07:38.664927Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:07:38.665013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:14610 2024-11-21T23:07:39.242860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:07:39.243331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:39.243903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T23:07:39.243948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2024-11-21T23:07:39.244032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T23:07:39.244091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T23:07:39.244135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T23:07:39.244203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T23:07:39.244516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T23:07:39.245743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2024-11-21T23:07:39.246012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:07:39.246033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:39.246175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:07:39.246205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T23:07:39.248775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2024-11-21T23:07:39.248952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2024-11-21T23:07:39.249223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:07:39.249262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:07:39.249425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T23:07:39.249518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:07:39.249541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439873166730727618:2392], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2024-11-21T23:07:39.249559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439873166730727618:2392], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2024-11-21T23:07:39.249605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:07:39.249668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet72057594046644480 waiting... 2024-11-21T23:07:39.253843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChann ... execution finished, don't wait for ack delivery in input channelId: 40, seqNo: [1] 2024-11-21T23:12:38.554441Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 41, seqNo: [1] 2024-11-21T23:12:38.554460Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 42, seqNo: [1] 2024-11-21T23:12:38.554481Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 43, seqNo: [1] 2024-11-21T23:12:38.554502Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 44, seqNo: [1] 2024-11-21T23:12:38.554523Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 45, seqNo: [1] 2024-11-21T23:12:38.554543Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 46, seqNo: [1] 2024-11-21T23:12:38.554563Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 47, seqNo: [1] 2024-11-21T23:12:38.554583Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 48, seqNo: [1] 2024-11-21T23:12:38.554604Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 49, seqNo: [1] 2024-11-21T23:12:38.554623Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 50, seqNo: [1] 2024-11-21T23:12:38.554643Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 51, seqNo: [1] 2024-11-21T23:12:38.554662Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 52, seqNo: [1] 2024-11-21T23:12:38.554681Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 53, seqNo: [1] 2024-11-21T23:12:38.554703Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 54, seqNo: [1] 2024-11-21T23:12:38.554724Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 55, seqNo: [1] 2024-11-21T23:12:38.554746Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 56, seqNo: [1] 2024-11-21T23:12:38.554768Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 57, seqNo: [1] 2024-11-21T23:12:38.554787Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 58, seqNo: [1] 2024-11-21T23:12:38.554805Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 59, seqNo: [1] 2024-11-21T23:12:38.554826Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 60, seqNo: [1] 2024-11-21T23:12:38.554846Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. 
Tasks execution finished, don't wait for ack delivery in input channelId: 61, seqNo: [1] 2024-11-21T23:12:38.554866Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 62, seqNo: [1] 2024-11-21T23:12:38.554888Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 63, seqNo: [1] 2024-11-21T23:12:38.554908Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished, don't wait for ack delivery in input channelId: 64, seqNo: [1] 2024-11-21T23:12:38.554939Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. Tasks execution finished 2024-11-21T23:12:38.554988Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7439874452873505347:3690], TxId: 281474976715770, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd8fzebfd5c4km1nw499j09a. SessionId : ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T23:12:38.555255Z node 28 :KQP_COMPUTE DEBUG: TxId: 281474976715770, task: 65. pass away 2024-11-21T23:12:38.555540Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976715770;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:12:38.555539Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439874452873505259:3614] TxId: 281474976715770. Ctx: { TraceId: 01jd8fzebfd5c4km1nw499j09a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [28:7439874452873505347:3690], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 47437 DurationUs: 488000 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 668 FinishTimeMs: 1732230758553 ComputeCpuTimeUs: 163 BuildCpuTimeUs: 505 WaitInputTimeUs: 222893 HostName: "ghrun-uc25mmfz34" NodeId: 28 StartTimeMs: 1732230758065 } MaxMemoryUsage: 1048576 } 2024-11-21T23:12:38.555630Z node 28 :KQP_EXECUTER INFO: TxId: 281474976715770. Ctx: { TraceId: 01jd8fzebfd5c4km1nw499j09a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7439874452873505347:3690] 2024-11-21T23:12:38.555841Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439874452873505259:3614] TxId: 281474976715770. Ctx: { TraceId: 01jd8fzebfd5c4km1nw499j09a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:12:38.555841Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7439874452873505347:3690], TxId: 281474976715770, task: 65. Ctx: { CustomerSuppliedId : . TraceId : 01jd8fzebfd5c4km1nw499j09a. SessionId : ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [28:7439874452873505259:3614] TaskId: 65 Stats: CpuTimeUs: 47437 DurationUs: 488000 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 668 FinishTimeMs: 1732230758553 ComputeCpuTimeUs: 163 BuildCpuTimeUs: 505 WaitInputTimeUs: 222893 HostName: "ghrun-uc25mmfz34" NodeId: 28 StartTimeMs: 1732230758065 } MaxMemoryUsage: 1048576 2024-11-21T23:12:38.555943Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7439874452873505259:3614] TxId: 281474976715770. Ctx: { TraceId: 01jd8fzebfd5c4km1nw499j09a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.176537s ReadRows: 0 ReadBytes: 0 ru: 117 rate limiter was not found force flag: 1 2024-11-21T23:12:38.556070Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, ActorId: [28:7439874448578537904:3614], ActorState: ExecuteState, TraceId: 01jd8fzebfd5c4km1nw499j09a, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2024-11-21T23:12:38.556636Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, ActorId: [28:7439874448578537904:3614], ActorState: ExecuteState, TraceId: 01jd8fzebfd5c4km1nw499j09a, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 753.714 QueriesCount: 1 2024-11-21T23:12:38.556746Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, ActorId: [28:7439874448578537904:3614], ActorState: ExecuteState, TraceId: 01jd8fzebfd5c4km1nw499j09a, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2024-11-21T23:12:38.556921Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, ActorId: [28:7439874448578537904:3614], ActorState: ExecuteState, TraceId: 01jd8fzebfd5c4km1nw499j09a, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2024-11-21T23:12:38.556988Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, ActorId: [28:7439874448578537904:3614], ActorState: ExecuteState, TraceId: 01jd8fzebfd5c4km1nw499j09a, EndCleanup, isFinal: 1 2024-11-21T23:12:38.557093Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, ActorId: [28:7439874448578537904:3614], ActorState: ExecuteState, TraceId: 01jd8fzebfd5c4km1nw499j09a, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7439874379859056451:2256] 2024-11-21T23:12:38.557159Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, ActorId: [28:7439874448578537904:3614], ActorState: unknown state, TraceId: 01jd8fzebfd5c4km1nw499j09a, Cleanup temp tables: 0 RESULT: [] --------------------- STATS: total CPU: 3161 duration: 1613 usec cpu: 1613 usec duration: 726964 usec cpu: 747793 usec { name: "/Root/OlapStore/log1" } 2024-11-21T23:12:38.564235Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230757000, txId: 18446744073709551615] shutting down 2024-11-21T23:12:38.564465Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MjNiMDI1YWMtNDM0Yzk4NWUtYjMyMDFkMmUtMjZmMzRjMzI=, ActorId: [28:7439874448578537904:3614], ActorState: unknown state, TraceId: 01jd8fzebfd5c4km1nw499j09a, Session actor destroyed 2024-11-21T23:12:38.583085Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[28:7439874388448991885:2292];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2024-11-21T23:12:38.583227Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[28:7439874388448991889:2293];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2024-11-21T23:12:38.695386Z node 28 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037890;parent=[28:7439874388448991889:2293];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T23:12:38.695575Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[28:7439874388448991885:2292];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T23:12:38.695694Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[28:7439874388448991880:2291];fline=actor.cpp:33;event=skip_flush_writing; 2024-11-21T23:12:38.700218Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[28:7439874388448991879:2290];fline=actor.cpp:33;event=skip_flush_writing; >> KqpTx::SnapshotRO [GOOD] |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |83.7%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2024-11-21T23:12:27.665933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:27.666380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:27.666501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002730/r3tmp/tmpsq7eKh/pdisk_1.dat 2024-11-21T23:12:28.472802Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3766, node 1 2024-11-21T23:12:28.896555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:28.897505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:28.897585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:28.898153Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:12:28.945913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:12:29.067436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:29.068439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:29.101090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6854 2024-11-21T23:12:29.978043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:12:34.000858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:34.000983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:34.062168Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:12:34.066992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:34.378777Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:34.470148Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:12:34.470287Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:12:34.511823Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:12:34.513054Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:12:34.513292Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:12:34.513360Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:12:34.513442Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:12:34.513498Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:12:34.513566Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:12:34.513625Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:12:34.514103Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:12:34.781577Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:12:34.781713Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:12:34.807750Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1777:2563] 2024-11-21T23:12:34.820523Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1802:2575] 2024-11-21T23:12:34.821054Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1802:2575], schemeshard id = 72075186224037889 2024-11-21T23:12:34.843478Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T23:12:34.891100Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:12:34.891174Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:12:34.891253Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T23:12:34.895839Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:34.895958Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:34.906844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:12:34.915994Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:12:34.916180Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:12:34.956756Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:12:34.974341Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:35.031177Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:12:35.757741Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:12:35.975290Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:12:37.301588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2142:3022], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:37.301754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:37.475012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T23:12:38.020601Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:38.020867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:38.026570Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:38.026946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:38.027091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:38.027236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:38.027366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:38.027496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:38.027710Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:38.027837Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:38.027957Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:38.028085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2289:2843];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:38.112099Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2295:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:38.112224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:38.112508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:38.112660Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:38.112768Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:38.112927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2295:2845];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Clea ... ::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:12:39.249799Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:12:39.250268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:12:39.250460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:12:39.250740Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:12:39.250797Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:12:39.251001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:12:39.251069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:12:39.251306Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:12:39.251369Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:12:39.251527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:12:39.251573Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:12:39.252007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:12:39.252084Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:12:39.252222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:12:39.252276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:12:39.252564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:12:39.252626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:12:39.252738Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:12:39.252818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:12:39.252916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:12:39.252959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:12:39.253011Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:12:39.253054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:12:39.253456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:12:39.253537Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:12:39.253737Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:12:39.253782Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:12:39.253973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:12:39.254045Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:12:39.254302Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:12:39.254371Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:12:39.254607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:12:39.254656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:12:41.191141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2933:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:41.191379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:41.211944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037889 2024-11-21T23:12:42.410119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3084:3170], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.410320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.436882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037889 waiting actualization: 0/0.000020s 2024-11-21T23:12:45.544147Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3415:3620] 2024-11-21T23:12:45.547162Z node 2 :STATISTICS DEBUG: [72075186224037897] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> TxUsage::WriteToTopic_Demo_33 [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> 
LocalPartition::WithoutPartitionWithSplit |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |83.7%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> TxUsage::WriteToTopic_Demo_44 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 14255, MsgBus: 4647 2024-11-21T23:12:27.748461Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874411489572399:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011c6/r3tmp/tmpdvTLLx/pdisk_1.dat 2024-11-21T23:12:28.007141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:28.170694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:28.170779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:28.179030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14255, node 1 2024-11-21T23:12:28.260845Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:28.261232Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:28.261326Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:28.483139Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:28.483177Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:28.483190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:28.483304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4647 TClient is connected to server localhost:4647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:29.120977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:29.159421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.333260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.544353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:29.692455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:31.863123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874428669443106:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:31.863574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.180689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.266543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.305312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.353664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.391654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.460910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:32.553436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874432964410900:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.553532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.554042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874432964410905:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:32.558841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:32.570564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874432964410908:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:32.744861Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874411489572399:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:32.744925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:34.665628Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ0MzRjMjgtNjlhYmYwMDctZDM1YjNhY2EtYTM4OWFlMGM=, ActorId: [1:7439874437259378521:2462], ActorState: ExecuteState, TraceId: 01jd8fzcad3fg1twx1shan3pe6, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 5505, MsgBus: 16926 2024-11-21T23:12:39.759968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:39.760182Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:12:39.760373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011c6/r3tmp/tmpFUTiUy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5505, node 2 2024-11-21T23:12:40.489315Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:40.490378Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:40.490450Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:40.490490Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:40.490763Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:12:40.544089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:40.544234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:40.556166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16926 TClient is connected to server localhost:16926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:41.109371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:41.186974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:41.592155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:42.224331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.762373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:43.528806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1729:3349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:43.529165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:43.644822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:43.947493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:44.309374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:44.666330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:44.967312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.413521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.844768Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2295:3788], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.844912Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.845263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2300:3793], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.860872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:46.156200Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2302:3795], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:12:47.879988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:12:48.156148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:12:48.628243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 18136, MsgBus: 62576 2024-11-21T23:12:38.707990Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874455696981074:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:38.708022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00115c/r3tmp/tmpJ0ySz6/pdisk_1.dat 2024-11-21T23:12:39.794245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:39.807683Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:39.819699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:39.819789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18136, node 1 2024-11-21T23:12:39.832348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:40.039069Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:40.039105Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:40.039111Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:40.039208Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62576 TClient is connected to server localhost:62576 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:41.284538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:41.330294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:41.617331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.246762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.454883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:43.708027Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874455696981074:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:43.708081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:45.483324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874485761753859:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.483458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.528609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.591854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.636059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.687423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.763000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.860416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.946574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874485761754361:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.946669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.954578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874485761754366:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.957833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:45.967603Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:12:45.968197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874485761754368:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:49.592156Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWI4MzgyYTgtZWRlYTgyNGYtYWViM2JkZTktMmUzZjQ3N2I=, ActorId: [1:7439874498646656598:2469], ActorState: ExecuteState, TraceId: 01jd8fztsx558qcv9d554bsbh0, Create QueryResponse for error on request, msg:
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 >> TxUsage::WriteToTopic_Demo_16 [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 10204, MsgBus: 15916 2024-11-21T23:12:30.328101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874422293122680:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:30.328262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00117c/r3tmp/tmp55EYWn/pdisk_1.dat 2024-11-21T23:12:30.868951Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10204, node 1 2024-11-21T23:12:31.099274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:31.099366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:31.103433Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:31.103442Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:31.103449Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:31.103544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:12:31.113194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15916 TClient is connected to server localhost:15916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:31.876297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
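The KqpTx::SnapshotRO block above ends with issue code 2008: a snapshot read-only transaction accepts reads only, so the UPSERT is rejected at execution time and the session logs "Create QueryResponse for error on request". The following is a minimal sketch of that behaviour, assuming the YDB Python SDK's table-service API (the endpoint, the `/Root/KeyValue` table columns, and the exact `ydb.SnapshotReadOnly()` mode name are assumptions and may differ by SDK version); it is not the unit test's own code.

# Minimal sketch (assumed YDB Python SDK API; endpoint and table layout are hypothetical):
# a write attempted inside a snapshot read-only transaction is rejected, which is the
# "Operation 'Upsert' can't be performed in read only transaction, code: 2008" error above.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

with ydb.SessionPool(driver) as pool:
    session = pool.acquire()
    try:
        tx = session.transaction(ydb.SnapshotReadOnly())
        tx.execute("SELECT Key, Value FROM `/Root/KeyValue`;")  # reads are fine in this mode
        # The next statement is rejected: snapshot RO transactions allow no writes,
        # so the SDK raises an error instead of committing.
        tx.execute('UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "x");', commit_tx=True)
    finally:
        pool.release(session)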
2024-11-21T23:12:31.930147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:32.121544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:32.317632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:32.418471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:34.651433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874439472993424:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:34.664255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:34.707244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:34.783325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:34.825155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:34.871363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:34.907539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:35.029767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:35.132168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874443767961223:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:35.132251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:35.132743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874443767961228:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:35.137299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:35.153239Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:12:35.153540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874443767961230:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:35.341051Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874422293122680:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:35.341455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:39.614291Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2024-11-21T23:12:39.616342Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439874460947831042:2462], Table: `/Root/EightShard` ([72057594046644480:3:1]), SessionActorId: [1:7439874448062928899:2462]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 3]`. ShardID=72075186224037891, Sink=[1:7439874460947831042:2462].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T23:12:39.617004Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439874460947831022:2462], SessionActorId: [1:7439874448062928899:2462], Transaction locks invalidated. Table `/Root/EightShard`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7439874448062928899:2462]. isRollback=0 2024-11-21T23:12:39.617143Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQ1MGI1MjUtYjg5NDZiZTgtY2YxNWY3ZDktNzM4MDk1ZWY=, ActorId: [1:7439874448062928899:2462], ActorState: ExecuteState, TraceId: 01jd8fzgjc0e0kp46vhnr5c2ga, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7439874460947831023:2462] from: [1:7439874460947831022:2462] 2024-11-21T23:12:39.617493Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439874460947831023:2462] TxId: 281474976710673. Ctx: { TraceId: 01jd8fzgjc0e0kp46vhnr5c2ga, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQ1MGI1MjUtYjg5NDZiZTgtY2YxNWY3ZDktNzM4MDk1ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/EightShard`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T23:12:39.620404Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710673; 2024-11-21T23:12:39.620559Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1732230759658 : 281474976710673] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" } 2024-11-21T23:12:39.625667Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQ1MGI1MjUtYjg5NDZiZTgtY2YxNWY3ZDktNzM4MDk1ZWY=, ActorId: [1:7439874448062928899:2462], ActorState: ExecuteState, TraceId: 01jd8fzgjc0e0kp46vhnr5c2ga, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2807, MsgBus: 3368 2024-11-21T23:12:40.805925Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874463986211970:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:40.805983Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00117c/r3tmp/tmpjH1x9h/pdisk_1.dat 2024-11-21T23:12:41.146519Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:41.169817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:41.169898Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:41.171607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2807, node 2 2024-11-21T23:12:41.351012Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:41.351046Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:41.351067Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:41.351177Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3368 TClient is connected to server localhost:3368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:12:41.899454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.055592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.188132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.561395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.651811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:45.459431Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874485461050159:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.459536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.501742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.552630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.653890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.714306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.807266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.810935Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874463986211970:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:45.811013Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:45.908120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:46.016394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874489756017960:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.016483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.016708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874489756017965:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.021392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:46.063507Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874489756017967:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:52.189181Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjFlNmE1ODMtNDY4MDNmMWYtZmFiZTJlNGEtMzAzY2E5MDg=, ActorId: [2:7439874498345952910:2469], ActorState: ExecuteState, TraceId: 01jd8fzx5y097v3f4djdr991za, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 >> TSubscriberCombinationsTest::MigratedPathRecreation >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_33 [GOOD] Test command err: 2024-11-21T23:10:33.986713Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873920377879433:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:33.986951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:34.309196Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bed/r3tmp/tmpfmM0jR/pdisk_1.dat 2024-11-21T23:10:34.586134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:34.586213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:34.603136Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:34.604150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7557, node 1 2024-11-21T23:10:34.822762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002bed/r3tmp/yandex60AR60.tmp 2024-11-21T23:10:34.822792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002bed/r3tmp/yandex60AR60.tmp 2024-11-21T23:10:34.822963Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002bed/r3tmp/yandex60AR60.tmp 2024-11-21T23:10:34.823063Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:34.908183Z INFO: TTestServer started on Port 4626 GrpcPort 7557 TClient is connected to server localhost:4626 PQClient connected to localhost:7557 WaitRootIsUp 'Root'... 
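The ReadWriteTxFailsOnConcurrentWrite output above shows the other side of KQP's optimistic locking: a transaction that has read rows loses its locks when another transaction commits a write to those rows first, and its own commit is then rejected with ABORTED, "Transaction locks invalidated" (the "-withSink" variant surfaces this through the write sink actor, hence the KQP_COMPUTE "Got LOCKS BROKEN" lines, before the session returns the ABORTED response). Below is a rough sketch of that interleaving, assuming the YDB Python SDK; only the table name `/Root/EightShard` comes from the log, while column names, key values, endpoint, and the exact API calls are illustrative assumptions.

# Rough sketch (assumed YDB Python SDK API). Two sessions interleave so that the first
# transaction's read lock is broken by a concurrent committed write; its later commit
# is rejected with ABORTED ("Transaction locks invalidated").
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

with ydb.SessionPool(driver) as pool:
    s1, s2 = pool.acquire(), pool.acquire()
    try:
        tx1 = s1.transaction(ydb.SerializableReadWrite())
        tx1.execute("SELECT Data FROM `/Root/EightShard` WHERE Key = 101u;")  # takes an optimistic read lock

        # A concurrent writer commits an update to the same row while tx1 is still open.
        s2.transaction(ydb.SerializableReadWrite()).execute(
            "UPSERT INTO `/Root/EightShard` (Key, Data) VALUES (101u, 42);", commit_tx=True
        )

        # tx1's buffered write now depends on a broken lock; committing fails with ABORTED.
        tx1.execute("UPSERT INTO `/Root/EightShard` (Key, Data) VALUES (101u, 1);")
        tx1.commit()
    finally:
        pool.release(s1)
        pool.release(s2)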
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:35.308597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:35.334850Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:35.358476Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:35.364743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:10:35.567198Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:35.587043Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:10:37.703109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873937557749259:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:37.708401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:37.711412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873937557749248:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:37.711892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:37.726519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873937557749262:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:38.105148Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873941852716633:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:38.112306Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmJlNzZiZmEtNmU5ODlmM2UtZDdlYzNmZTQtOWY3MDM3MWE=, ActorId: [1:7439873937557749245:2307], ActorState: ExecuteState, TraceId: 01jd8fvt3hcyy3t5f3twyzhyeq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:38.118732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:38.123443Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:38.159163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:38.287245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873941852716903:2620] 2024-11-21T23:10:38.985270Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873920377879433:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:38.985339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:44.779179Z :WriteToTopic_Demo_3 INFO: TTopicSdkTestSetup started 2024-11-21T23:10:44.813593Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:44.830665Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873967622521010:2813] connected; active server actors: 1 2024-11-21T23:10:44.830998Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T23:10:44.831665Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:44.831830Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:44.832462Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:44.852523Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:44.852802Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:44.853334Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:44.853527Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:44.853569Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:44.853583Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:44.853597Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:44.853626Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:44.853660Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:44.853681Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:44.865753Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:44.865780Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:44.865844Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873967622521033:2436], now have 1 active actors on pipe 2024-11-21T23:10:44.878022Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:44.878054Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873967622521009:2812], now have 1 active actors on pipe 2024-11-21T23:10:44.889346Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439873924672846977 RawX2: 4294969487 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelP ... tion 0 2024-11-21T23:12:52.092462Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Write in transaction. 
Partition: 0, WriteId: { NodeId: 9 KeyId: 281474976710676 }, NeedSupportivePartition: 0 2024-11-21T23:12:52.092506Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message topic: topic_A partition: 0 SourceId: '\0test-message_group_id' SeqNo: 2 partNo : 0 messageNo: 1 size 16457 offset: -1 2024-11-21T23:12:52.092718Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976710676}, 100001} part blob processing sourceId '\0test-message_group_id' seqNo 2 partNo 0 2024-11-21T23:12:52.093955Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976710676}, 100001} part blob complete sourceId '\0test-message_group_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 16546 count 1 nextOffset 1 batches 1 2024-11-21T23:12:52.094801Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] Add new write blob: topic 'topic_A' partition {0, {9, 281474976710676}, 100001} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100001_00000000000000000000_00000_0000000001_00000| size 16536 WTime 1732230772094 2024-11-21T23:12:52.095080Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:12:52.106991Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 16479 2024-11-21T23:12:52.107064Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] TPartition::ReplyWrite. Partition: {0, {9, 281474976710676}, 100001} 2024-11-21T23:12:52.107121Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {9, 281474976710676}, 100001}, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2024-11-21T23:12:52.107358Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976710676}, 100001} user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:12:52.107393Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] Topic 'topic_A' partition {0, {9, 281474976710676}, 100001} user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T23:12:52.107476Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 1 requestId: cookie: 2 2024-11-21T23:12:52.107607Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T23:12:52.108122Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] read cookie 0 Topic 'topic_A' partition {0, {9, 281474976710676}, 100001} user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T23:12:52.108156Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 
2024-11-21T23:12:52.108228Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T23:12:52.108251Z node 9 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T23:12:52.108325Z node 9 :PERSQUEUE DEBUG: Topic 'topic_A' partition {0, {9, 281474976710676}, 100001} user test-consumer readTimeStamp done, result 1732230772087 queuesize 0 startOffset 0 2024-11-21T23:12:52.109117Z :DEBUG: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:12:52.109319Z :DEBUG: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 2 written_in_tx { } } write_statistics { persisting_time { nanos: 13000000 } min_queue_wait_time { nanos: 5000000 } max_queue_wait_time { nanos: 5000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:12:52.109366Z :DEBUG: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] OnAck: seqNo=2, txId=01jd8fzxab3fencg587wncjvmg, WriteCount=1, AckCount=1 2024-11-21T23:12:52.110813Z :DEBUG: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] Write session: acknoledged message 2 2024-11-21T23:12:52.130832Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439874517899220695 RawX2: 4503638282078649 } TxId: 281474976710677 Data { Operations { PartitionId: 0 Path: "/Root/topic_A" SupportivePartition: 100001 } Op: Commit SendingShards: 72075186224037894 ReceivingShards: 72075186224037894 Immediate: true WriteId { NodeId: 9 KeyId: 281474976710676 } } 2024-11-21T23:12:52.130885Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PartitionId {0, {9, 281474976710676}, 100001} for WriteId {9, 281474976710676} 2024-11-21T23:12:52.130914Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] TxId 281474976710677 has WriteId {9, 281474976710676} 2024-11-21T23:12:52.130935Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] immediate transaction 2024-11-21T23:12:52.131146Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2024-11-21T23:12:52.131208Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2024-11-21T23:12:52.131246Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=1 2024-11-21T23:12:52.131284Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Head=Offset 0 PartNo 0 PackedSize 16536 count 1 nextOffset 1 batches 1, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2024-11-21T23:12:52.131417Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 2 partNo 0 2024-11-21T23:12:52.137735Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 16546 count 1 nextOffset 2 batches 1 
2024-11-21T23:12:52.137821Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2024-11-21T23:12:52.138833Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Add new write blob: topic 'topic_A' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 16536 WTime 1732230772138 2024-11-21T23:12:52.139150Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:12:52.142422Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:12:52.143571Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvTransactionCompleted WriteId {9, 281474976710676} 2024-11-21T23:12:52.143607Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvPQ::TEvDeletePartition to partition {0, {9, 281474976710676}, 100001} 2024-11-21T23:12:52.143665Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: {0, {9, 281474976710676}, 100001}, State: StateIdle] Handle TEvPQ::TEvDeletePartition 2024-11-21T23:12:52.144565Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvLongTxService::TEvLockStatus LockId: 281474976710676 LockNode: 9 Status: STATUS_NOT_FOUND 2024-11-21T23:12:52.144597Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] there is already a transaction TxId 281474976710677 for WriteId {9, 281474976710676} 2024-11-21T23:12:52.149531Z :INFO: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T23:12:52.149565Z :INFO: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T23:12:52.149614Z :DEBUG: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T23:12:52.150331Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvDeletePartitionDone {0, {9, 281474976710676}, 100001} 2024-11-21T23:12:52.150469Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] send TEvUnsubscribeLock for WriteId {9, 281474976710676} 2024-11-21T23:12:52.150497Z node 9 :PERSQUEUE WARN: [PQ: 72075186224037894] Unknown transaction 281474976710677 2024-11-21T23:12:52.150539Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2024-11-21T23:12:52.151447Z :INFO: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T23:12:52.151485Z :DEBUG: [/Root] SessionId [test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T23:12:52.152013Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2024-11-21T23:12:52.153425Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0 grpc closed 2024-11-21T23:12:52.153446Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|a0198541-475587e3-f42fcff8-105ac7fd_0 is DEAD 2024-11-21T23:12:52.171362Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: 
NActors::TEvents::TEvPoison 2024-11-21T23:12:52.171423Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:12:52.171471Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:12:52.178543Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:12:52.178604Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439874513604253295:2470] destroyed 2024-11-21T23:12:52.178635Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:12:52.178661Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439874517899220671:2470] destroyed 2024-11-21T23:12:52.178676Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:12:52.178700Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439874513604253298:2470] destroyed 2024-11-21T23:12:52.181527Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberTest::Boot >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> TxUsage::WriteToTopic_Demo_17 >> TSubscriberTest::Boot [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSinkTx::OlapExplicitTcl [GOOD] >> KqpSinkTx::OlapInteractive |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> KqpSinkLocks::DifferentKeyUpdate >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapInvalidateOnError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: 2024-11-21T23:12:55.323577Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T23:12:55.323682Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2024-11-21T23:12:55.323905Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T23:12:55.323942Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T23:12:55.323998Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:33:2065] 2024-11-21T23:12:55.324053Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2024-11-21T23:12:55.324291Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:33:2065] 2024-11-21T23:12:55.324324Z node 1 :SCHEME_BOARD_REPLICA 
NOTICE: [1:6:2053] Commit generation: owner# 900, generation# 1 2024-11-21T23:12:55.324413Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:12:55.324864Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:39:2067] 2024-11-21T23:12:55.324897Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 2024-11-21T23:12:55.325044Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:39:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:12:55.325191Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:40:2067] 2024-11-21T23:12:55.325213Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2024-11-21T23:12:55.325250Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:40:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:12:55.325371Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2067] 2024-11-21T23:12:55.325395Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2024-11-21T23:12:55.325425Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:41:2067], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:12:55.325477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2024-11-21T23:12:55.325529Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:39:2067] 2024-11-21T23:12:55.325595Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2024-11-21T23:12:55.325626Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:40:2067] 2024-11-21T23:12:55.325666Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2024-11-21T23:12:55.325703Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2067] 2024-11-21T23:12:55.325790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:36:2067] 2024-11-21T23:12:55.325891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:37:2067] 2024-11-21T23:12:55.325936Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/db/dir_inside] Set up state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:12:55.325985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2067] 2024-11-21T23:12:55.326019Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2067][/root/db/dir_inside] Ignore empty state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2024-11-21T23:12:55.326231Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:32:2064], cookie# 0, event size# 118 2024-11-21T23:12:55.326269Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2024-11-21T23:12:55.332371Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T23:12:55.332627Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2024-11-21T23:12:55.332695Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:39:2067] 2024-11-21T23:12:55.332755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:36:2067] 2024-11-21T23:12:55.332818Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/db/dir_inside] Update to strong state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2024-11-21T23:12:55.333095Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:33:2065], cookie# 0, event size# 117 2024-11-21T23:12:55.333131Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2024-11-21T23:12:55.333191Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2024-11-21T23:12:55.333289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2024-11-21T23:12:55.333349Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:40:2067] 2024-11-21T23:12:55.333407Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:37:2067] 2024-11-21T23:12:55.333463Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/db/dir_inside] Path was updated to new version: owner# [1:34:2066], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:12:55.813670Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:12:55.814354Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2024-11-21T23:12:55.814947Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2024-11-21T23:12:55.815000Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2024-11-21T23:12:55.815057Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:33:2064] 2024-11-21T23:12:55.815141Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:34:2064] 2024-11-21T23:12:55.815190Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:32:2064][path] Set up state: owner# [3:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:12:55.815239Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2064] 2024-11-21T23:12:55.815270Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:32:2064][path] Ignore empty state: owner# [3:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> KqpQueryPerf::DeleteOn-QueryService |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 28308, MsgBus: 26683 2024-11-21T23:12:39.827001Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874459860763591:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:39.827269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001132/r3tmp/tmpGu7pHp/pdisk_1.dat 2024-11-21T23:12:40.743338Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:40.785891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:40.786208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:40.794558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:40.797329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28308, node 1 2024-11-21T23:12:41.063011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:41.063042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:41.063050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:41.063158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26683 TClient is connected to server localhost:26683 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:42.123050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.150830Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:42.170406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:42.350216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.560304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.656996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:44.814511Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874459860763591:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:44.814591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:44.827405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874481335601646:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:44.827537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.269385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.358122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.401847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.453164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.513269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.565136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.687815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874485630569445:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.687956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.688192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874485630569451:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.692689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:45.741552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874485630569453:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:48.388425Z node 1 :GLOBAL ERROR: fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710673; 2024-11-21T23:12:48.390791Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439874498515471874:2467], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7439874494220504448:2467]Got LOCKS BROKEN for table `[OwnerId: 72057594046644480, LocalPathId: 2]`. ShardID=72075186224037888, Sink=[1:7439874498515471874:2467].{
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T23:12:48.400096Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439874498515471867:2467], SessionActorId: [1:7439874494220504448:2467], Transaction locks invalidated. Table `/Root/TwoShard`. {
: Fatal: Operation is aborting because locks are not valid }. statusCode=ABORTED. subIssues=
: Fatal: Operation is aborting because locks are not valid . sessionActorId=[1:7439874494220504448:2467]. isRollback=0 2024-11-21T23:12:48.400267Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmY4NTQwOTAtOWJhZDNlNTktZDk0OTc0YzMtYmM4ZjcwZjE=, ActorId: [1:7439874494220504448:2467], ActorState: ExecuteState, TraceId: 01jd8fzspg0s6tqtrmgyn1yjqw, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7439874498515471868:2467] from: [1:7439874498515471867:2467] 2024-11-21T23:12:48.400512Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439874498515471868:2467] TxId: 281474976710673. Ctx: { TraceId: 01jd8fzspg0s6tqtrmgyn1yjqw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY4NTQwOTAtOWJhZDNlNTktZDk0OTc0YzMtYmM4ZjcwZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table `/Root/TwoShard`. {
: Fatal: Operation is aborting because locks are not valid };
: Fatal: Operation is aborting because locks are not valid } 2024-11-21T23:12:48.406175Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmY4NTQwOTAtOWJhZDNlNTktZDk0OTc0YzMtYmM4ZjcwZjE=, ActorId: [1:7439874494220504448:2467], ActorState: ExecuteState, TraceId: 01jd8fzspg0s6tqtrmgyn1yjqw, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 5622, MsgBus: 28390 2024-11-21T23:12:49.265221Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874504537517574:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:49.265408Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001132/r3tmp/tmpqg4ouB/pdisk_1.dat 2024-11-21T23:12:49.412354Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:49.423202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:49.423304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:49.424930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5622, node 2 2024-11-21T23:12:49.498445Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:49.498473Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:49.498482Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:49.498578Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28390 TClient is connected to server localhost:28390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:50.043665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:50.077895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:50.163255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:50.370235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:50.459383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:53.006589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874521717388479:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:53.006663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:53.068445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:53.156202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:53.237793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:53.284252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:53.338759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:53.392194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:53.488122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874521717388985:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:53.488203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:53.488618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874521717388990:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:53.492500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:53.508795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874521717388992:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:12:54.299614Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874504537517574:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:54.299754Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:55.600478Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTYxYjExZTgtZmUzNDU2ZGMtZDM4NjliMzQtMzExNmJkZGE=, ActorId: [2:7439874526012356602:2460], ActorState: ExecuteState, TraceId: 01jd8g00pjb1yv7yzwqfd9j1da, Create QueryResponse for error on request, msg: >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap >> TxUsage::WriteToTopic_Demo_24 [GOOD] >> KqpSinkTx::DeferredEffects [GOOD] >> KqpSinkTx::ExplicitTcl >> BsControllerTest::SelfHealBlock4Plus2 >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 28760, MsgBus: 6231 2024-11-21T23:12:39.967219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874463080519546:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:40.000216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00111a/r3tmp/tmpxsJPu5/pdisk_1.dat 2024-11-21T23:12:40.631722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:40.631822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:40.659514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:40.720774Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28760, node 1 2024-11-21T23:12:40.931382Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:40.931415Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:40.931422Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:40.931518Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6231 TClient is connected to server localhost:6231 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:42.088765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.168748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.495901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.746267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.855895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:44.970542Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874463080519546:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:44.970621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:45.288357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874488850325020:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.288449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.658678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.702982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.781024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.842786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.904588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:46.128964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:46.254544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874493145292830:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.254660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.262534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874493145292835:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.275148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:46.297806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874493145292839:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 29987, MsgBus: 10458 2024-11-21T23:12:50.077278Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874508938664276:2197];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00111a/r3tmp/tmpxoyVAB/pdisk_1.dat 2024-11-21T23:12:50.103502Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:50.175053Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29987, node 2 2024-11-21T23:12:50.192462Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:50.192536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:50.204100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:50.291005Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:50.291028Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:50.291036Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:50.291126Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10458 TClient is connected to server localhost:10458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:12:50.766664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:12:50.783368Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:50.801151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:12:50.889987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:51.155293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:51.295850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:54.071780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874526118535015:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:54.071888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:54.181423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:54.241609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:54.299116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:54.346637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:54.427091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:54.501914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:54.560916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874526118535517:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:54.561161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:54.561234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874526118535522:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:54.564612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:54.583653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874526118535524:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:12:55.074886Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874508938664276:2197];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:55.075033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> LocalPartition::DiscoveryHang [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> KqpSinkLocks::UncommittedRead [GOOD] >> KqpSinkMvcc::OlapMultiSinks >> SelfHealActorTest::SingleErrorDisk [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> BsControllerTest::TestLocalBrokenRelocation |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::CancelledExportEndTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 7900, MsgBus: 61659 2024-11-21T23:12:39.695225Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874462639437789:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:39.695271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001105/r3tmp/tmp6yNlWk/pdisk_1.dat 2024-11-21T23:12:40.583094Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:40.611290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:40.611386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:40.619846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7900, node 1 2024-11-21T23:12:40.897823Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:40.897847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:40.897855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:40.897955Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61659 TClient is connected to server localhost:61659 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:41.996248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.068502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.310665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.570619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.751613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:44.695911Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874462639437789:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:44.695992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:44.847909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874484114275898:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:44.848035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.384351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.482316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.530276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.580788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.679247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.745935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:45.867147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874488409243706:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.867267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.870771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874488409243711:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.876047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:45.894158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874488409243713:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:48.601969Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFiYTg5NGYtMzcxZDg5ZmMtNWM2YmE5OTAtYjE4YzhlYjQ=, ActorId: [1:7439874501294145945:2469], ActorState: ExecuteState, TraceId: 01jd8fzsvs0h8f9zrwd5w1rvvs, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 18056, MsgBus: 31703 2024-11-21T23:12:49.659529Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874502870623945:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:49.659578Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001105/r3tmp/tmpxOpXvJ/pdisk_1.dat 2024-11-21T23:12:50.324020Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:50.368562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:50.368668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:50.371667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18056, node 2 2024-11-21T23:12:50.685267Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:50.685291Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:50.685302Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:50.685406Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31703 TClient is connected to server localhost:31703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:52.019890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:52.028015Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:52.042426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:52.150013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:52.416730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:52.525267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:54.659701Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874502870623945:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:54.659781Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:55.372142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874528640429431:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:55.372244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:55.416941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:55.471369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:55.546281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:55.632885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:55.685904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:55.804462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:55.916577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874528640429944:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:55.916676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:55.916921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874528640429949:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:55.921099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:55.937018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874528640429951:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:58.445115Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzdhNGUyOTItMTViNGJjY2ItNmE3MTliMTItNTdmMjcwMmY=, ActorId: [2:7439874537230364868:2465], ActorState: ExecuteState, TraceId: 01jd8g037fe583yekxc9zzk48k, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken >> TxUsage::WriteToTopic_Demo_25 |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |83.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> TS3WrapperTests::AbortUnknownUpload >> TExportToS3Tests::CancelledExportEndTime [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects >> TS3WrapperTests::AbortUnknownUpload [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2024-11-21T23:13:03.003589Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 34DA6C59-6A25-4A21-8A29-CB3328CA77A3, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:15378 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8C1BB789-48E3-4885-B452-870907FADE64 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2024-11-21T23:13:03.017482Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 34DA6C59-6A25-4A21-8A29-CB3328CA77A3, response# >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:29.418023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:29.418136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:29.418182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:29.418225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:29.418274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:29.418316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:29.418562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2024-11-21T23:12:29.418923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:29.508646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:29.508699Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:29.521147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:29.525125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:29.525323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:29.531318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:29.532147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:29.532706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:29.533041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:29.536928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.538221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:29.538278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.538526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:29.538586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:29.538633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:29.538750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.544847Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:29.667122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:29.667338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.667506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:29.667759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:29.667806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.671447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2024-11-21T23:12:29.671546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:29.671726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.671807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:29.671838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:29.671863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:29.675295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.675340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:29.675367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:29.679724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.679792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.679877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:29.679979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:29.684445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:29.687607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:29.687885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:29.688909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:29.689075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:29.689132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:29.689433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:29.689486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:29.689661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:29.689734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:29.695779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:29.695832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:29.696028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:29.696071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:29.696461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:29.696505Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:29.696619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:29.696662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:29.696710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:29.696755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:29.696808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:29.696839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:29.696911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:29.696946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:29.696977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:29.702096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:29.702240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:29.702281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:29.702323Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:29.702365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:29.702517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
arrier }, at tablet# 72057594046678944 2024-11-21T23:13:02.360431Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710758:0 240 -> 240 2024-11-21T23:13:02.363256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2024-11-21T23:13:02.363317Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710758:0 ProgressState 2024-11-21T23:13:02.363449Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2024-11-21T23:13:02.363493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2024-11-21T23:13:02.363562Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: true 2024-11-21T23:13:02.363675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710758 2024-11-21T23:13:02.363730Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2024-11-21T23:13:02.363783Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2024-11-21T23:13:02.363825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2024-11-21T23:13:02.364005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:13:02.364053Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:13:02.366272Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2024-11-21T23:13:02.366363Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2024-11-21T23:13:02.376214Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2024-11-21T23:13:02.395719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:13:02.395789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:13:02.405562Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/export-102" OperationType: ESchemeOpBackup Backup { TableName: "0" NumberOfRetries: 0 S3Settings { Endpoint: "localhost:1902" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 
4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:02.406317Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, 
at schemeshard: 72057594046678944 2024-11-21T23:13:02.406657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:13:02.407086Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:02.407155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:02.408657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:13:02.408716Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:13:02.416581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:02.416856Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2024-11-21T23:13:02.417278Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2024-11-21T23:13:02.417381Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2024-11-21T23:13:02.417905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:02.417982Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet72057594046678944 2024-11-21T23:13:02.418050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2024-11-21T23:13:02.418097Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2024-11-21T23:13:02.429427Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2024-11-21T23:13:02.429529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2024-11-21T23:13:02.430194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:02.430261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:02.430482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T23:13:02.437104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2024-11-21T23:13:02.437462Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:02.437512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:02.437683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Propose 
backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2024-11-21T23:13:02.438569Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2024-11-21T23:13:02.438739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2024-11-21T23:13:02.440248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T23:13:02.440434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2024-11-21T23:13:02.450178Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T23:13:02.452006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:13:02.452078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:544:2506] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 3191, MsgBus: 12747 2024-11-21T23:12:32.282775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:32.288154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:32.288347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011ad/r3tmp/tmp6leCZS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3191, node 1 2024-11-21T23:12:32.906965Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:32.911221Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:32.911323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:32.911362Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:32.911744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:12:32.965882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:32.966020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:32.981566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12747 TClient is connected to server localhost:12747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:33.457207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:33.628303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:34.107880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:34.743252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:35.179457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:36.132834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1733:3352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:36.133054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:36.161141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:36.367519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:36.707806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:37.034903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:37.348515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:37.778821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:38.226687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2298:3792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:38.226785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:38.227227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2303:3797], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:38.241619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:38.520637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2305:3799], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:12:40.008570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:12:40.503439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:12:41.021931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9050, MsgBus: 32264 2024-11-21T23:12:51.668518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:51.668736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:12:51.668973Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011ad/r3tmp/tmpkv2NqA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9050, node 2 2024-11-21T23:12:52.443876Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:52.444708Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:52.444765Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:52.444799Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:52.445016Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:12:52.495148Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:52.495298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:52.507228Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32264 TClient is connected to server localhost:32264 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:52.996067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:53.177763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:53.739459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:54.256998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:54.717292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:55.422721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1729:3349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:55.423102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:55.458377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:55.841530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:56.137535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:56.490917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:56.843589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:57.266705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:57.644730Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2298:3790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:57.644846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:57.645150Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2303:3795], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:57.657014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:57.896486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2305:3797], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:12:59.234700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:12:59.519609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:13:00.107896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapNamedStatement |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2024-11-21T23:13:00.030622Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T23:13:00.030679Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T23:13:00.030746Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T23:13:00.030776Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T23:13:00.030838Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T23:13:00.030861Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T23:13:00.030899Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T23:13:00.030923Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T23:13:00.030965Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T23:13:00.030990Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T23:13:00.031035Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T23:13:00.031060Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T23:13:00.031096Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T23:13:00.031117Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T23:13:00.031150Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T23:13:00.031173Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T23:13:00.031223Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T23:13:00.031249Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T23:13:00.031284Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T23:13:00.031305Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T23:13:00.031355Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T23:13:00.031380Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T23:13:00.031416Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T23:13:00.031437Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T23:13:00.031493Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T23:13:00.031517Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T23:13:00.031559Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T23:13:00.031579Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T23:13:00.031613Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T23:13:00.031646Z 15 00h00m00.000000s :BS_NODE 
DEBUG: [15] Connect 2024-11-21T23:13:00.031686Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T23:13:00.031715Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T23:13:00.031761Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T23:13:00.031781Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T23:13:00.031822Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T23:13:00.031842Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T23:13:00.031876Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T23:13:00.031896Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T23:13:00.031935Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T23:13:00.031955Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T23:13:00.031991Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T23:13:00.032014Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T23:13:00.032054Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T23:13:00.032086Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T23:13:00.032128Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T23:13:00.032151Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T23:13:00.032183Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T23:13:00.032204Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T23:13:00.032254Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T23:13:00.032276Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T23:13:00.032310Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T23:13:00.032332Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T23:13:00.032379Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T23:13:00.032411Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T23:13:00.032451Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T23:13:00.032474Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T23:13:00.032514Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T23:13:00.032534Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T23:13:00.032584Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T23:13:00.032604Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T23:13:00.032658Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T23:13:00.032678Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T23:13:00.032712Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T23:13:00.032731Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T23:13:00.032773Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T23:13:00.032794Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T23:13:00.032833Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T23:13:00.032852Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T23:13:00.032886Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2024-11-21T23:13:00.032906Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T23:13:00.032943Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T23:13:00.032966Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T23:13:00.049449Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T23:13:00.050851Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] 
ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T23:13:00.050921Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T23:13:00.050968Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T23:13:00.051006Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2024-11-21T23:13:00.051048Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2024-11-21T23:13:00.051092Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2024-11-21T23:13:00.051134Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T23:13:00.051175Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T23:13:00.051213Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2024-11-21T23:13:00.051250Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T23:13:00.051290Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T23:13:00.051328Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T23:13:00.051363Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T23:13:00.051405Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T23:13:00.051461Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T23:13:00.051510Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T23:13:00.051564Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2024-11-21T23:13:00.051605Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T23:13:00.051653Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T23:13:00.051695Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR 
ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T23:13:00.051735Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T23:13:00.051772Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T23:13:00.051812Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T23:13:00.051863Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2024-11-21T23:13:00.051909Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2024-11-21T23:13:00.051951Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T23:13:00.052003Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T23:13:00.052046Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2024-11-21T23:13:00.052084Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T23:13:00.052122Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T23:13:00.052160Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T23:13:00.052212Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T23:13:00.052267Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T23:13:00.052306Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2024-11-21T23:13:03.639687Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2024-11-21T23:13:03.639740Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2024-11-21T23:13:03.639784Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2024-11-21T23:13:03.639830Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2024-11-21T23:13:03.640390Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T23:13:03.640437Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2024-11-21T23:13:03.640474Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2024-11-21T23:13:03.640524Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2024-11-21T23:13:03.640575Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2024-11-21T23:13:03.640615Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2024-11-21T23:13:03.640654Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2024-11-21T23:13:03.640688Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2024-11-21T23:13:03.640729Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2024-11-21T23:13:03.640765Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2024-11-21T23:13:03.640812Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2024-11-21T23:13:03.640849Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2024-11-21T23:13:03.640885Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2024-11-21T23:13:03.640919Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2024-11-21T23:13:03.640964Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2024-11-21T23:13:03.641005Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2024-11-21T23:13:03.641054Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2024-11-21T23:13:03.641576Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2024-11-21T23:13:03.641621Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2024-11-21T23:13:03.641657Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2024-11-21T23:13:03.641698Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2024-11-21T23:13:03.641734Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2024-11-21T23:13:03.641769Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2024-11-21T23:13:03.641804Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2024-11-21T23:13:03.641844Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2024-11-21T23:13:03.641882Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] 
VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2024-11-21T23:13:03.641937Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2024-11-21T23:13:03.641992Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2024-11-21T23:13:03.642032Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2024-11-21T23:13:03.642496Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2024-11-21T23:13:03.642543Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2024-11-21T23:13:03.642580Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2024-11-21T23:13:03.642618Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2024-11-21T23:13:03.642668Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2024-11-21T23:13:03.642712Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2024-11-21T23:13:03.642752Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2024-11-21T23:13:03.642794Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2024-11-21T23:13:03.642849Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2024-11-21T23:13:03.645874Z 4 01h25m01.734560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:03.646409Z 7 01h25m01.738560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:03.646844Z 7 01h25m01.839560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:03.647270Z 4 01h25m02.124560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:03.647690Z 10 01h25m02.408560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2024-11-21T23:13:03.648081Z 5 01h25m02.835560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:03.648570Z 7 01h25m03.165560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:03.648977Z 4 01h25m03.300560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:03.649346Z 4 01h25m03.353560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:03.649664Z 2 01h25m04.121560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:03.650020Z 10 01h25m04.910560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2024-11-21T23:13:03.650309Z 7 01h25m04.979560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:03.652010Z 8 01h25m05.075560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:03.652529Z 2 01h25m05.280560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:03.652944Z 5 01h25m05.622560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:03.653354Z 10 01h25m06.018560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2024-11-21T23:13:03.654104Z 10 01h25m13.571560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] 
status changed to READY 2024-11-21T23:13:03.655145Z 1 01h25m13.572072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.655213Z 1 01h25m13.572072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2024-11-21T23:13:03.656148Z 2 01h25m17.149560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2024-11-21T23:13:03.657170Z 1 01h25m17.150072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.657222Z 1 01h25m17.150072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2024-11-21T23:13:03.657373Z 5 01h25m19.629560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2024-11-21T23:13:03.658093Z 1 01h25m19.630072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.658152Z 1 01h25m19.630072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2024-11-21T23:13:03.658569Z 7 01h25m20.554560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2024-11-21T23:13:03.659234Z 1 01h25m20.555072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.659277Z 1 01h25m20.555072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2024-11-21T23:13:03.659369Z 7 01h25m21.548560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2024-11-21T23:13:03.660042Z 1 01h25m21.549072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.660097Z 1 01h25m21.549072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2024-11-21T23:13:03.660231Z 10 01h25m24.242560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2024-11-21T23:13:03.660923Z 1 01h25m24.243072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.660975Z 1 01h25m24.243072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2024-11-21T23:13:03.661075Z 4 01h25m24.539560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2024-11-21T23:13:03.661887Z 1 01h25m24.540072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.661928Z 1 01h25m24.540072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2024-11-21T23:13:03.662010Z 8 01h25m24.881560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2024-11-21T23:13:03.662794Z 1 01h25m24.882072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.662870Z 1 01h25m24.882072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2024-11-21T23:13:03.663356Z 10 01h25m25.849560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2024-11-21T23:13:03.664083Z 1 01h25m25.850072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.664146Z 1 01h25m25.850072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2024-11-21T23:13:03.664260Z 4 01h25m26.371560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2024-11-21T23:13:03.664994Z 1 01h25m26.372072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.665035Z 1 01h25m26.372072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2024-11-21T23:13:03.665146Z 7 01h25m28.103560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2024-11-21T23:13:03.665836Z 1 01h25m28.104072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.665881Z 1 01h25m28.104072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2024-11-21T23:13:03.665966Z 4 01h25m28.282560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2024-11-21T23:13:03.666658Z 1 01h25m28.283072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 
2024-11-21T23:13:03.666706Z 1 01h25m28.283072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2024-11-21T23:13:03.668106Z 7 01h25m32.841560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2024-11-21T23:13:03.668914Z 1 01h25m32.842072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.668959Z 1 01h25m32.842072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2024-11-21T23:13:03.669341Z 5 01h25m34.705560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2024-11-21T23:13:03.670190Z 1 01h25m34.706072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.670239Z 1 01h25m34.706072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2024-11-21T23:13:03.670361Z 2 01h25m34.852560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2024-11-21T23:13:03.671109Z 1 01h25m34.853072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.671154Z 1 01h25m34.853072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2024-11-21T23:13:03.672060Z 4 01h25m37.594560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2024-11-21T23:13:03.672737Z 1 01h25m37.595072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:03.672788Z 1 01h25m37.595072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2024-11-21T23:13:03.383062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874562917377057:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:03.383153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012e5/r3tmp/tmpRX4Wpc/pdisk_1.dat 2024-11-21T23:13:03.994543Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:04.028162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:04.028264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:04.030048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:04.223364Z node 1 :GRPC_SERVER DEBUG: [0x51f000028480] stream accepted Name# Session ok# true peer# ipv6:[::1]:33178 2024-11-21T23:13:04.223670Z node 1 :GRPC_SERVER DEBUG: [0x51f000028480] facade attach Name# Session actor# [1:7439874567212344845:2251] peer# ipv6:[::1]:33178 2024-11-21T23:13:04.223909Z node 1 :GRPC_SERVER DEBUG: [0x51f000028480] stream done notification Name# Session ok# true peer# ipv6:[::1]:33178 2024-11-21T23:13:04.223983Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2024-11-21T23:13:04.224704Z node 1 :GRPC_SERVER DEBUG: [0x51f000028480] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2024-11-21T23:13:04.224754Z node 1 :GRPC_SERVER DEBUG: [0x51f000028480] deregistering request Name# Session peer# unknown (finish done) >> KqpUserConstraint::KqpReadNull+UploadNull >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest 
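[Editorial note, illustrative only] The GRPC_SERVER DEBUG lines in the TGRpcStreamingTest output above (ClientDisconnects) and in the WriteAndFinishWorks output that follows trace each streaming session through a fixed event order: stream accepted, facade attach, optional writes, stream done notification, stream finished, and finally deregistering the request. The sketch below only illustrates that ordering check; the helper names are hypothetical and it is not part of the YDB test suite.

# Illustrative sketch (hypothetical helper, not YDB code): check that the
# session events from the gRPC streaming facade appear in the order seen
# in the DEBUG trace above.
EXPECTED_ORDER = [
    "stream accepted",
    "facade attach",
    "stream done notification",
    "stream finished",
    "deregistering request",
]

def session_events_in_order(events):
    """True if EXPECTED_ORDER occurs as a subsequence of the event list."""
    remaining = iter(events)
    return all(any(marker in event for event in remaining) for marker in EXPECTED_ORDER)

sample = [
    "stream accepted Name# Session ok# true",
    "facade attach Name# Session",
    "facade write Name# Session data#",        # WriteAndFinishWorks interleaves writes here
    "write finished Name# Session ok# true",
    "stream done notification Name# Session ok# true",
    "stream finished Name# Session ok# true",
    "deregistering request Name# Session (finish done)",
]
print(session_events_in_order(sample))  # True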
|83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2024-11-21T23:13:05.638285Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874571560753687:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:05.638343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012cd/r3tmp/tmpOoQ6Gt/pdisk_1.dat 2024-11-21T23:13:06.075222Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:06.104909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:06.104985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:06.114277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:06.335123Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream accepted Name# Session ok# true peer# ipv6:[::1]:40228 2024-11-21T23:13:06.335404Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade attach Name# Session actor# [1:7439874575855721269:2251] peer# ipv6:[::1]:40228 2024-11-21T23:13:06.335429Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade write Name# Session data# peer# ipv6:[::1]:40228 2024-11-21T23:13:06.335698Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade write Name# Session data# peer# ipv6:[::1]:40228 grpc status# (0) message# 2024-11-21T23:13:06.336277Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] write finished Name# Session ok# true peer# ipv6:[::1]:40228 2024-11-21T23:13:06.336404Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2024-11-21T23:13:06.336698Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream done notification Name# Session ok# true peer# ipv6:[::1]:40228 2024-11-21T23:13:06.336779Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] write finished Name# Session ok# true peer# ipv6:[::1]:40228 2024-11-21T23:13:06.336803Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream finished Name# Session ok# true peer# ipv6:[::1]:40228 grpc status# (0) message# 2024-11-21T23:13:06.336849Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] deregistering request Name# Session peer# ipv6:[::1]:40228 (finish done) 2024-11-21T23:13:06.336893Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 >> KqpQueryPerf::DeleteOn-QueryService [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15650, MsgBus: 22745 2024-11-21T23:12:58.170850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874544203298641:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:58.171037Z 
node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00286e/r3tmp/tmpSbr88M/pdisk_1.dat 2024-11-21T23:12:59.263189Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:59.385664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:59.402847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:59.402944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:59.405642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15650, node 1 2024-11-21T23:13:00.466901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:13:00.466922Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:13:00.466936Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:13:00.467027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22745 TClient is connected to server localhost:22745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:13:02.431016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:02.567189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:02.946473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:13:03.175759Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874544203298641:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:03.175853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:13:03.247147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:03.428869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:04.118074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874569973103947:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:04.118367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.533293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.600017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.665747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.720906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.792981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.864760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.976187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874574268071802:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.976311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.976711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874574268071807:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.982996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:13:06.000006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874574268071809:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 7859, MsgBus: 8909 2024-11-21T23:12:42.098021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:42.109345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:42.109535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00115a/r3tmp/tmpFSEEUB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7859, node 1 2024-11-21T23:12:42.755928Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:42.762938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:42.763026Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:42.763065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:42.763471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:12:42.810262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:42.810409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:42.827041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8909 TClient is connected to server localhost:8909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:43.204682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:43.386024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:43.945564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:44.548903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:44.906503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:46.551516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1729:3348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.551861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.616069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:46.943451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.343522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.759521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:48.233828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:48.917459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:49.254601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2299:3792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:49.254776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:49.255194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2304:3797], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:49.259441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:49.525444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2306:3799], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:12:52.292725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:12:52.648584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:12:53.246910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6318, MsgBus: 18465 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00115a/r3tmp/tmpLMKBMs/pdisk_1.dat 2024-11-21T23:12:56.770595Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:56.819918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:56.819998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:56.823729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:56.835190Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6318, node 2 2024-11-21T23:12:56.841029Z node 2 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:56.891101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:56.891124Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:56.891133Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:56.891222Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18465 TClient is connected to server localhost:18465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:57.418983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:13:00.265841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874550032615087:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:00.265949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:00.267585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874550032615123:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:00.272135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:13:00.287126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874550032615125:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:13:00.497776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:13:00.610699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:13:02.769421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2024-11-21T23:12:59.601455Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T23:12:59.601510Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T23:12:59.601580Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T23:12:59.601601Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T23:12:59.601636Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T23:12:59.601655Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T23:12:59.601701Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T23:12:59.601724Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T23:12:59.601759Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T23:12:59.601777Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T23:12:59.601824Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T23:12:59.601851Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T23:12:59.601885Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T23:12:59.601910Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T23:12:59.601953Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T23:12:59.601973Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T23:12:59.602001Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T23:12:59.602019Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T23:12:59.602056Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T23:12:59.602075Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T23:12:59.602132Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T23:12:59.602152Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T23:12:59.602190Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T23:12:59.602209Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T23:12:59.602249Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T23:12:59.602272Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T23:12:59.602315Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T23:12:59.602336Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T23:12:59.602367Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T23:12:59.602411Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T23:12:59.602440Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T23:12:59.602470Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T23:12:59.602512Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 
2024-11-21T23:12:59.602530Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T23:12:59.602597Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T23:12:59.602616Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T23:12:59.602648Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T23:12:59.602666Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T23:12:59.602698Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T23:12:59.602716Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T23:12:59.602750Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T23:12:59.602769Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T23:12:59.602802Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T23:12:59.602819Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T23:12:59.602855Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T23:12:59.602877Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T23:12:59.602924Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T23:12:59.602944Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T23:12:59.602980Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T23:12:59.602998Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T23:12:59.603035Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T23:12:59.603055Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T23:12:59.603100Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T23:12:59.603130Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T23:12:59.603166Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T23:12:59.603187Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T23:12:59.603228Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T23:12:59.603247Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T23:12:59.603288Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T23:12:59.603313Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T23:12:59.603357Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T23:12:59.603378Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T23:12:59.603410Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T23:12:59.603433Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T23:12:59.628283Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2157:49] Status# ERROR ClientId# [1:2157:49] ServerId# [0:0:0] PipeClient# [1:2157:49] 2024-11-21T23:12:59.629466Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2158:37] Status# ERROR ClientId# [2:2158:37] ServerId# [0:0:0] PipeClient# [2:2158:37] 2024-11-21T23:12:59.629520Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2159:37] Status# ERROR ClientId# [3:2159:37] ServerId# [0:0:0] PipeClient# [3:2159:37] 2024-11-21T23:12:59.629555Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2160:37] Status# ERROR ClientId# [4:2160:37] ServerId# [0:0:0] PipeClient# [4:2160:37] 2024-11-21T23:12:59.629593Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2161:37] Status# ERROR ClientId# [5:2161:37] ServerId# [0:0:0] PipeClient# [5:2161:37] 2024-11-21T23:12:59.629646Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2162:37] Status# ERROR ClientId# [6:2162:37] ServerId# [0:0:0] PipeClient# [6:2162:37] 2024-11-21T23:12:59.629683Z 7 00h00m00.000000s 
:BS_NODE DEBUG: [7] ClientConnected Sender# [7:2163:37] Status# ERROR ClientId# [7:2163:37] ServerId# [0:0:0] PipeClient# [7:2163:37] 2024-11-21T23:12:59.629723Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2164:37] Status# ERROR ClientId# [8:2164:37] ServerId# [0:0:0] PipeClient# [8:2164:37] 2024-11-21T23:12:59.629758Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2165:37] Status# ERROR ClientId# [9:2165:37] ServerId# [0:0:0] PipeClient# [9:2165:37] 2024-11-21T23:12:59.629802Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2166:37] Status# ERROR ClientId# [10:2166:37] ServerId# [0:0:0] PipeClient# [10:2166:37] 2024-11-21T23:12:59.629838Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2167:37] Status# ERROR ClientId# [11:2167:37] ServerId# [0:0:0] PipeClient# [11:2167:37] 2024-11-21T23:12:59.629877Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2168:37] Status# ERROR ClientId# [12:2168:37] ServerId# [0:0:0] PipeClient# [12:2168:37] 2024-11-21T23:12:59.629912Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2169:37] Status# ERROR ClientId# [13:2169:37] ServerId# [0:0:0] PipeClient# [13:2169:37] 2024-11-21T23:12:59.629950Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2170:37] Status# ERROR ClientId# [14:2170:37] ServerId# [0:0:0] PipeClient# [14:2170:37] 2024-11-21T23:12:59.629985Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2171:37] Status# ERROR ClientId# [15:2171:37] ServerId# [0:0:0] PipeClient# [15:2171:37] 2024-11-21T23:12:59.630037Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2172:37] Status# ERROR ClientId# [16:2172:37] ServerId# [0:0:0] PipeClient# [16:2172:37] 2024-11-21T23:12:59.630072Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2173:37] Status# ERROR ClientId# [17:2173:37] ServerId# [0:0:0] PipeClient# [17:2173:37] 2024-11-21T23:12:59.630112Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2174:37] Status# ERROR ClientId# [18:2174:37] ServerId# [0:0:0] PipeClient# [18:2174:37] 2024-11-21T23:12:59.630151Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2175:37] Status# ERROR ClientId# [19:2175:37] ServerId# [0:0:0] PipeClient# [19:2175:37] 2024-11-21T23:12:59.630188Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2176:37] Status# ERROR ClientId# [20:2176:37] ServerId# [0:0:0] PipeClient# [20:2176:37] 2024-11-21T23:12:59.630223Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2177:37] Status# ERROR ClientId# [21:2177:37] ServerId# [0:0:0] PipeClient# [21:2177:37] 2024-11-21T23:12:59.630277Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2178:37] Status# ERROR ClientId# [22:2178:37] ServerId# [0:0:0] PipeClient# [22:2178:37] 2024-11-21T23:12:59.630331Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2179:37] Status# ERROR ClientId# [23:2179:37] ServerId# [0:0:0] PipeClient# [23:2179:37] 2024-11-21T23:12:59.630371Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2180:37] Status# ERROR ClientId# [24:2180:37] ServerId# [0:0:0] PipeClient# [24:2180:37] 2024-11-21T23:12:59.630434Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2181:37] Status# ERROR ClientId# [25:2181:37] ServerId# [0:0:0] PipeClient# [25:2181:37] 2024-11-21T23:12:59.630470Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] 
ClientConnected Sender# [26:2182:37] Status# ERROR ClientId# [26:2182:37] ServerId# [0:0:0] PipeClient# [26:2182:37] 2024-11-21T23:12:59.630516Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2183:37] Status# ERROR ClientId# [27:2183:37] ServerId# [0:0:0] PipeClient# [27:2183:37] 2024-11-21T23:12:59.630612Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2184:37] Status# ERROR ClientId# [28:2184:37] ServerId# [0:0:0] PipeClient# [28:2184:37] 2024-11-21T23:12:59.630651Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2185:37] Status# ERROR ClientId# [29:2185:37] ServerId# [0:0:0] PipeClient# [29:2185:37] 2024-11-21T23:12:59.630700Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2186:37] Status# ERROR ClientId# [30:2186:37] ServerId# [0:0:0] PipeClient# [30:2186:37] 2024-11-21T23:12:59.630766Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2187:37] Status# ERROR ClientId# [31:2187:37] ServerId# [0:0:0] PipeClient# [31:2187:37] 2024-11-21T23:12:59.630821Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2188:37] Status# ERROR ClientId# [32:2188:37] ServerId# [0:0:0] PipeClient# [32:2188:37] 2024-11-21T23:12:59.756584Z 1 00h00m00.002048s :BS_NODE DEBUG: [1] CheckState from [1:2253:71] expected 1 current 0 2024-11-21T23:12:59.756654Z 2 00h00m00.002048s :BS_NODE DEBUG: [2] CheckState from [2:2254:38] expected 1 current 0 2024-11-21T23:12:59.756684Z 3 00h00m00.002048s :BS_NODE DEBUG: [3] CheckState from [3:2255:38] expected 1 current 0 2024-11-21T23:12:59.756714Z 4 00h00m00.002048s :BS_NODE DEBUG: [4] CheckState from [4:2256:38] expected 1 current 0 2024-11-21T23:12:59.756745Z 5 00h00m00.002048s :BS_NODE DEBUG: [5] CheckState from [5:2257:38] expected 1 current 0 2024-11-21T23:12:59.756778Z 6 00h00m00.002048s :BS_NODE DEBUG: [6] CheckState from [6:2258:38] expected 1 current 0 2024-11-21T23:12:59.756815Z 7 00h00m00.002048s :BS_NODE DEBUG: [7] CheckState from [7:2259:38] expected 1 current 0 2024-11-21T23:12:59.756875Z 8 00h00m00.002048s :BS_NODE DEBUG: [8] CheckState from [8:2260:38] expected 1 current 0 2024-11-21T23:12:59.756916Z 9 00h00m00.002048s :BS_NODE DEBUG: [9] CheckState from [9:2261:38] expected 1 current 0 2024-11-21T23:12:59.756944Z 10 00h00m00.002048s :BS_NODE DEBUG: [10] CheckState from [10:2262 ... 
00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000032:2:0:7:0] -> [80000032:3:0:7:0] 2024-11-21T23:13:11.898056Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T23:13:11.898099Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] VDiskId# [80000032:3:0:1:0] PDiskId# 1000 VSlotId# 1016 created 2024-11-21T23:13:11.898157Z 27 05h15m00.117920s :BS_NODE DEBUG: [27] VDiskId# [80000032:3:0:1:0] status changed to INIT_PENDING 2024-11-21T23:13:11.898276Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2024-11-21T23:13:11.898317Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [80000022:2:0:0:0] -> [80000022:3:0:0:0] 2024-11-21T23:13:11.898372Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.899134Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T23:13:11.899200Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [80000022:2:0:2:0] -> [80000022:3:0:2:0] 2024-11-21T23:13:11.899279Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T23:13:11.899322Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] VDiskId# [80000022:2:0:3:0] -> [80000022:3:0:3:0] 2024-11-21T23:13:11.899395Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T23:13:11.899438Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000022:2:0:4:0] -> [80000022:3:0:4:0] 2024-11-21T23:13:11.899532Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2024-11-21T23:13:11.899574Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [80000022:2:0:5:0] -> [80000022:3:0:5:0] 2024-11-21T23:13:11.899663Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:11.899707Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000022:2:0:6:0] -> [80000022:3:0:6:0] 2024-11-21T23:13:11.899783Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T23:13:11.899822Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] VDiskId# [80000022:3:0:1:0] PDiskId# 1000 VSlotId# 1018 created 2024-11-21T23:13:11.899884Z 6 05h15m00.117920s :BS_NODE DEBUG: [6] VDiskId# [80000022:3:0:1:0] status changed to INIT_PENDING 2024-11-21T23:13:11.899972Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:11.900019Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000022:2:0:7:0] -> [80000022:3:0:7:0] 2024-11-21T23:13:11.900138Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2024-11-21T23:13:11.900180Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [80000012:2:0:0:0] -> [80000012:3:0:0:0] 2024-11-21T23:13:11.900238Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.900322Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T23:13:11.900366Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [80000012:2:0:2:0] -> [80000012:3:0:2:0] 2024-11-21T23:13:11.900441Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T23:13:11.900482Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] VDiskId# [80000012:2:0:3:0] -> [80000012:3:0:3:0] 2024-11-21T23:13:11.900566Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T23:13:11.900609Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000012:2:0:4:0] -> [80000012:3:0:4:0] 2024-11-21T23:13:11.900682Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2024-11-21T23:13:11.900727Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [80000012:2:0:5:0] -> [80000012:3:0:5:0] 2024-11-21T23:13:11.900799Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 
2024-11-21T23:13:11.900841Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000012:2:0:6:0] -> [80000012:3:0:6:0] 2024-11-21T23:13:11.900911Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:11.900951Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000012:2:0:7:0] -> [80000012:3:0:7:0] 2024-11-21T23:13:11.901027Z 9 05h15m00.117920s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T23:13:11.901066Z 9 05h15m00.117920s :BS_NODE DEBUG: [9] VDiskId# [80000012:3:0:1:0] PDiskId# 1000 VSlotId# 1018 created 2024-11-21T23:13:11.901127Z 9 05h15m00.117920s :BS_NODE DEBUG: [9] VDiskId# [80000012:3:0:1:0] status changed to INIT_PENDING 2024-11-21T23:13:11.901245Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2024-11-21T23:13:11.901290Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [80000002:2:0:0:0] -> [80000002:3:0:0:0] 2024-11-21T23:13:11.901346Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.901410Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T23:13:11.901455Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [80000002:2:0:2:0] -> [80000002:3:0:2:0] 2024-11-21T23:13:11.901521Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T23:13:11.901565Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] VDiskId# [80000002:2:0:3:0] -> [80000002:3:0:3:0] 2024-11-21T23:13:11.901635Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T23:13:11.901679Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [80000002:2:0:4:0] -> [80000002:3:0:4:0] 2024-11-21T23:13:11.901750Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2024-11-21T23:13:11.901793Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [80000002:2:0:5:0] -> [80000002:3:0:5:0] 2024-11-21T23:13:11.901864Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:11.901907Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [80000002:2:0:6:0] -> [80000002:3:0:6:0] 2024-11-21T23:13:11.901976Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:11.902018Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [80000002:2:0:7:0] -> [80000002:3:0:7:0] 2024-11-21T23:13:11.902091Z 10 05h15m00.117920s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2024-11-21T23:13:11.902129Z 10 05h15m00.117920s :BS_NODE DEBUG: [10] VDiskId# [80000002:3:0:1:0] PDiskId# 1001 VSlotId# 1009 created 2024-11-21T23:13:11.902185Z 10 05h15m00.117920s :BS_NODE DEBUG: [10] VDiskId# [80000002:3:0:1:0] status changed to INIT_PENDING 2024-11-21T23:13:11.902306Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2024-11-21T23:13:11.902354Z 17 05h15m00.117920s :BS_NODE DEBUG: [17] VDiskId# [8000003a:2:0:0:0] -> [8000003a:3:0:0:0] 2024-11-21T23:13:11.905450Z 18 05h15m00.117920s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.905586Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T23:13:11.905633Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [8000003a:3:0:1:0] PDiskId# 1000 VSlotId# 1018 created 2024-11-21T23:13:11.905711Z 2 05h15m00.117920s :BS_NODE DEBUG: [2] VDiskId# [8000003a:3:0:1:0] status changed to INIT_PENDING 2024-11-21T23:13:11.905818Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T23:13:11.905880Z 20 05h15m00.117920s :BS_NODE DEBUG: [20] VDiskId# [8000003a:2:0:3:0] -> [8000003a:3:0:3:0] 2024-11-21T23:13:11.905964Z 21 05h15m00.117920s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T23:13:11.906011Z 21 
05h15m00.117920s :BS_NODE DEBUG: [21] VDiskId# [8000003a:2:0:4:0] -> [8000003a:3:0:4:0] 2024-11-21T23:13:11.906089Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2024-11-21T23:13:11.906137Z 22 05h15m00.117920s :BS_NODE DEBUG: [22] VDiskId# [8000003a:2:0:5:0] -> [8000003a:3:0:5:0] 2024-11-21T23:13:11.906219Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:11.906264Z 23 05h15m00.117920s :BS_NODE DEBUG: [23] VDiskId# [8000003a:2:0:6:0] -> [8000003a:3:0:6:0] 2024-11-21T23:13:11.906341Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:11.906421Z 24 05h15m00.117920s :BS_NODE DEBUG: [24] VDiskId# [8000003a:2:0:7:0] -> [8000003a:3:0:7:0] 2024-11-21T23:13:11.906502Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T23:13:11.906548Z 15 05h15m00.117920s :BS_NODE DEBUG: [15] VDiskId# [8000003a:2:0:2:0] -> [8000003a:3:0:2:0] 2024-11-21T23:13:11.912286Z 2 05h15m01.874920s :BS_NODE DEBUG: [2] VDiskId# [8000003a:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:11.912787Z 6 05h15m02.036920s :BS_NODE DEBUG: [6] VDiskId# [8000001a:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:11.913217Z 22 05h15m02.174920s :BS_NODE DEBUG: [22] VDiskId# [8000002d:4:0:6:0] status changed to REPLICATING 2024-11-21T23:13:11.913801Z 9 05h15m02.374920s :BS_NODE DEBUG: [9] VDiskId# [8000002a:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:11.914235Z 9 05h15m02.980920s :BS_NODE DEBUG: [9] VDiskId# [80000012:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:11.915028Z 27 05h15m03.179920s :BS_NODE DEBUG: [27] VDiskId# [80000032:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:11.915425Z 9 05h15m04.503920s :BS_NODE DEBUG: [9] VDiskId# [8000000a:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:11.915932Z 6 05h15m04.959920s :BS_NODE DEBUG: [6] VDiskId# [80000022:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:11.917149Z 10 05h15m05.322920s :BS_NODE DEBUG: [10] VDiskId# [80000002:3:0:1:0] status changed to REPLICATING 2024-11-21T23:13:11.930918Z 9 05h15m10.980920s :BS_NODE DEBUG: [9] VDiskId# [80000012:3:0:1:0] status changed to READY 2024-11-21T23:13:11.932606Z 18 05h15m10.981432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.932678Z 18 05h15m10.981432s :BS_NODE DEBUG: [18] VDiskId# [80000012:2:0:1:0] destroyed 2024-11-21T23:13:11.932875Z 9 05h15m11.688920s :BS_NODE DEBUG: [9] VDiskId# [8000000a:3:0:1:0] status changed to READY 2024-11-21T23:13:11.933765Z 18 05h15m11.689432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.933816Z 18 05h15m11.689432s :BS_NODE DEBUG: [18] VDiskId# [8000000a:2:0:1:0] destroyed 2024-11-21T23:13:11.934521Z 9 05h15m16.890920s :BS_NODE DEBUG: [9] VDiskId# [8000002a:3:0:1:0] status changed to READY 2024-11-21T23:13:11.935660Z 18 05h15m16.891432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.935720Z 18 05h15m16.891432s :BS_NODE DEBUG: [18] VDiskId# [8000002a:2:0:1:0] destroyed 2024-11-21T23:13:11.936295Z 6 05h15m25.058920s :BS_NODE DEBUG: [6] VDiskId# [80000022:3:0:1:0] status changed to READY 2024-11-21T23:13:11.937301Z 18 05h15m25.059432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.937358Z 18 05h15m25.059432s :BS_NODE DEBUG: [18] VDiskId# [80000022:2:0:1:0] destroyed 2024-11-21T23:13:11.937825Z 22 05h15m26.348920s :BS_NODE DEBUG: [22] VDiskId# [8000002d:4:0:6:0] status changed to READY 2024-11-21T23:13:11.944070Z 18 05h15m26.349432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 
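[Editorial note, illustrative only] The BS_NODE DEBUG trace in this SelfHealBlock4Plus2 output follows each newly created VDisk slot through INIT_PENDING, then REPLICATING, then READY; only after the new replica is READY is the superseded replica of the previous group generation destroyed. The following is a minimal sketch of that observed lifecycle (hypothetical names, Python for brevity), not an implementation of the BS controller.

# Minimal sketch of the replica lifecycle traced above; names are hypothetical.
from enum import Enum, auto

class VDiskState(Enum):
    INIT_PENDING = auto()
    REPLICATING = auto()
    READY = auto()

# Transitions observed in the DEBUG lines: INIT_PENDING -> REPLICATING -> READY.
ALLOWED = {
    VDiskState.INIT_PENDING: {VDiskState.REPLICATING},
    VDiskState.REPLICATING: {VDiskState.READY},
    VDiskState.READY: set(),
}

def replay(states):
    """Validate a recorded sequence of states against the observed lifecycle."""
    current = states[0]
    assert current is VDiskState.INIT_PENDING, "new slots start in INIT_PENDING"
    for nxt in states[1:]:
        assert nxt in ALLOWED[current], f"unexpected transition {current} -> {nxt}"
        current = nxt
    return current

# e.g. the trace for VDiskId [80000012:3:0:1:0] above; once it reaches READY,
# the old [80000012:2:0:1:0] replica is destroyed.
print(replay([VDiskState.INIT_PENDING, VDiskState.REPLICATING, VDiskState.READY]))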
2024-11-21T23:13:11.944148Z 18 05h15m26.349432s :BS_NODE DEBUG: [18] VDiskId# [8000002d:3:0:6:0] destroyed 2024-11-21T23:13:11.944348Z 6 05h15m26.422920s :BS_NODE DEBUG: [6] VDiskId# [8000001a:3:0:1:0] status changed to READY 2024-11-21T23:13:11.945254Z 18 05h15m26.423432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.945307Z 18 05h15m26.423432s :BS_NODE DEBUG: [18] VDiskId# [8000001a:2:0:1:0] destroyed 2024-11-21T23:13:11.945440Z 2 05h15m26.560920s :BS_NODE DEBUG: [2] VDiskId# [8000003a:3:0:1:0] status changed to READY 2024-11-21T23:13:11.946139Z 18 05h15m26.561432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.946184Z 18 05h15m26.561432s :BS_NODE DEBUG: [18] VDiskId# [8000003a:2:0:1:0] destroyed 2024-11-21T23:13:11.947772Z 27 05h15m33.810920s :BS_NODE DEBUG: [27] VDiskId# [80000032:3:0:1:0] status changed to READY 2024-11-21T23:13:11.948568Z 18 05h15m33.811432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.948617Z 18 05h15m33.811432s :BS_NODE DEBUG: [18] VDiskId# [80000032:2:0:1:0] destroyed 2024-11-21T23:13:11.949406Z 10 05h15m40.137920s :BS_NODE DEBUG: [10] VDiskId# [80000002:3:0:1:0] status changed to READY 2024-11-21T23:13:11.950067Z 18 05h15m40.138432s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:11.950120Z 18 05h15m40.138432s :BS_NODE DEBUG: [18] VDiskId# [80000002:2:0:1:0] destroyed >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:30.910632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:30.910721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.910770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:30.910813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:30.910854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:30.910889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:30.910955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.911256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:30.975851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:30.975893Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.987681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:30.991837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:30.992002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:30.997780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:12:30.998577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:30.999199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.999484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:31.005441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:31.006696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:31.006757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:31.007009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:31.007068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:31.007115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:31.007221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.014205Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:31.154235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:31.154677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.154903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:31.155146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:31.155196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.163234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:31.163378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:31.163578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.163660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:31.163724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:31.163757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:31.171215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.171283Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:31.171315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:31.186752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.186806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.186876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:31.186941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.193613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:31.196840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:31.197054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:31.197946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:31.198065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:31.198108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:31.198375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:31.198449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T23:12:31.198607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:31.198674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:31.207219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:31.207267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:31.207428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:31.207462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:31.207821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:31.207880Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:31.207983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:31.208016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.208057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:31.208096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:31.208127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:31.208152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:31.208206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:31.208236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:31.208266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:31.209916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:31.210022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:31.210055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:31.210089Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:31.210124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:31.220472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T23:13:05.234973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T23:13:05.235060Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T23:13:05.235229Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T23:13:05.235281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T23:13:05.235311Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T23:13:05.250695Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:13:08.019904Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0023 2024-11-21T23:13:08.041448Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0017 2024-11-21T23:13:08.063715Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T23:13:08.063922Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T23:13:08.064023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T23:13:08.064074Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T23:13:08.064202Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T23:13:08.064279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T23:13:08.064376Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T23:13:08.074770Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:13:11.121728Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0019 2024-11-21T23:13:11.147041Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0014 2024-11-21T23:13:11.172224Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2024-11-21T23:13:11.172432Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2024-11-21T23:13:11.172555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2024-11-21T23:13:11.172606Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2024-11-21T23:13:11.172742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2024-11-21T23:13:11.172800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2024-11-21T23:13:11.172844Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2024-11-21T23:13:11.190709Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2024-11-21T23:13:12.182601Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [3:560:2521], attempt# 1 2024-11-21T23:13:12.239963Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [3:559:2520] 2024-11-21T23:13:12.263105Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [3:560:2521], sender# [3:559:2520] 2024-11-21T23:13:12.263193Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [3:559:2520] 2024-11-21T23:13:12.263353Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [3:560:2521], sender# [3:559:2520], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 } 2024-11-21T23:13:12.263561Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [3:560:2521], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:61648 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8C6A14C1-6335-457A-B0AF-3009709B0F20 amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2024-11-21T23:13:12.283658Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle 
TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [3:560:2521], result# 2024-11-21T23:13:12.283929Z node 3 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [3:559:2520], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2024-11-21T23:13:12.304008Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:13:12.304118Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2024-11-21T23:13:12.304320Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:13:12.304435Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 438 RawX2: 12884904297 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2024-11-21T23:13:12.304514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:12.304558Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:12.304628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:13:12.304681Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2024-11-21T23:13:12.304912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:12.312464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:12.312961Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2024-11-21T23:13:12.313027Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2024-11-21T23:13:12.313146Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2024-11-21T23:13:12.313187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:13:12.313243Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready 
parts: 1/1, is published: true 2024-11-21T23:13:12.313335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:123:2149] message: TxId: 281474976710759 2024-11-21T23:13:12.313403Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2024-11-21T23:13:12.313449Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2024-11-21T23:13:12.313481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2024-11-21T23:13:12.313621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:13:12.323397Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2024-11-21T23:13:12.323501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2024-11-21T23:13:12.332534Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:13:12.332606Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:580:2537] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::ShouldCheckQuotas [GOOD] >> KqpSinkTx::ExplicitTcl [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TxUsage::WriteToTopic_Demo_17 [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::ExplicitTcl [GOOD] Test command err: Trying to start YDB, gRPC: 1687, MsgBus: 17977 2024-11-21T23:12:39.730311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874462541896451:2207];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:39.731520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001147/r3tmp/tmpwex5SY/pdisk_1.dat 2024-11-21T23:12:40.517718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:40.517830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:40.582241Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:40.583918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1687, node 1 2024-11-21T23:12:40.826866Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:40.826894Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:40.826899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:40.826987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17977 TClient is connected to server localhost:17977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:42.357573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.382544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:44.729215Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874462541896451:2207];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:44.729278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:46.476320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874492606668002:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.476452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874492606667980:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.476734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.481881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:12:46.516595Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:12:46.522636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874492606668012:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:12:47.207289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.346186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:12:49.807382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T23:12:55.470513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:12:55.470546Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 3672, MsgBus: 26731 2024-11-21T23:12:59.394905Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874546128690823:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001147/r3tmp/tmp9JZGtB/pdisk_1.dat 2024-11-21T23:12:59.582623Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:59.658438Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:59.662164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:59.662244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:59.680373Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3672, node 2 2024-11-21T23:12:59.865836Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:59.865860Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:59.865867Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:59.865954Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26731 TClient is connected to server localhost:26731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:13:00.593818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:00.607221Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:13:04.354717Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874546128690823:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:04.354776Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:13:04.902595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874567603527754:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:04.902710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874567603527788:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:04.902789Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:04.909751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:13:04.925106Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:13:04.925395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874567603527791:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:13:05.068427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.113218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:13:06.706793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:13:09.032014Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWE0MTFiMzQtNmQxZGIzNTktMTcxYmM4NS0xZDYyN2ZmYg==, ActorId: [2:7439874584783405117:2941], ActorState: ReadyState, TraceId: 01jd8g0dwh3pedqpg8rkv9d8ds, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:12:30.700323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:12:30.700411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.700452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:12:30.700487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:12:30.700529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:12:30.700575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:12:30.700646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:12:30.700951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:12:30.781104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:12:30.781155Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.792469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:12:30.796319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:12:30.796480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:12:30.802738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T23:12:30.803341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:12:30.803965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.804291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:12:30.808695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.812645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.812704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.812933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:12:30.813000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.813040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:12:30.813107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.819283Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:12:30.937949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:12:30.938183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.939412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:12:30.939743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:12:30.939808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.943352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.943450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:12:30.943640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.943733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:12:30.943770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:12:30.943802Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:12:30.945298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.945339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:12:30.945370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:12:30.946793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.946838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.946897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.946954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.950084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:12:30.951945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:12:30.952114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:12:30.953032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:12:30.953175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:12:30.953234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.953496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:12:30.953550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:12:30.953679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.953773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:12:30.955671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:12:30.955732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:12:30.955856Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:12:30.955891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:12:30.956253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:12:30.956306Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:12:30.956408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:12:30.956457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.956524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:12:30.956574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:12:30.956614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:12:30.956642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:12:30.956699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:12:30.956736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:12:30.956770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:12:30.958384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.958961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:12:30.958999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:12:30.959039Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:12:30.959067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:12:30.959133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
74976720762 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976720762 2024-11-21T23:13:14.937677Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T23:13:14.937719Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T23:13:14.937786Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2024-11-21T23:13:14.937929Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:14.939328Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.939424Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.939456Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T23:13:14.939489Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T23:13:14.939522Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:13:14.944900Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.945003Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.945035Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T23:13:14.945069Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2024-11-21T23:13:14.945103Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:13:14.945186Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2024-11-21T23:13:14.946666Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T23:13:14.947262Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2024-11-21T23:13:14.947313Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2024-11-21T23:13:14.947362Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion 
transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2024-11-21T23:13:14.947838Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2024-11-21T23:13:14.947950Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2024-11-21T23:13:14.948559Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:14.948663Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 17179871338 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:14.948706Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2024-11-21T23:13:14.948836Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2024-11-21T23:13:14.948902Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2024-11-21T23:13:14.948937Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2024-11-21T23:13:14.949009Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:14.949079Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:13:14.949118Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2024-11-21T23:13:14.949169Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2024-11-21T23:13:14.949210Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2024-11-21T23:13:14.949242Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2024-11-21T23:13:14.949302Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:13:14.949344Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2024-11-21T23:13:14.949379Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T23:13:14.949408Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T23:13:14.952568Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.952708Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.954380Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:14.954442Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:14.954595Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:13:14.954699Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:14.954748Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:332:2310], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2024-11-21T23:13:14.954786Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:332:2310], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2024-11-21T23:13:14.955615Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.955709Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.955757Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T23:13:14.955812Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T23:13:14.955859Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:13:14.956308Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.956391Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.956418Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2024-11-21T23:13:14.956445Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T23:13:14.956473Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:13:14.956535Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2024-11-21T23:13:14.956581Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:285:2273] 2024-11-21T23:13:14.960608Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.960946Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2024-11-21T23:13:14.961018Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2024-11-21T23:13:14.961078Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2024-11-21T23:13:14.961122Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2024-11-21T23:13:14.961159Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2024-11-21T23:13:14.961203Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2024-11-21T23:13:14.962736Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2024-11-21T23:13:14.962825Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:13:14.962881Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:695:2639] TestWaitNotification: OK eventTxId 102 >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:09:01.303713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:09:01.303815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:01.303851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:09:01.303881Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:09:01.303948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:09:01.303994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:09:01.304068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:01.304420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:01.388736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:09:01.388799Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:09:01.398581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:01.399002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:09:01.399197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:09:01.416399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:09:01.416768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:09:01.417448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:01.418050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:01.421044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:01.422297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:01.422354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:01.422444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:01.422485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:01.422519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:01.422762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:09:01.429321Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:09:01.572349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:01.572595Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.572779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:01.573000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:01.573066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.575571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:01.575676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:01.575845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.575902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:01.575954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:01.575991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:01.577696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.577747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:01.577777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:09:01.579377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.579445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.579480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:01.579515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:01.588347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:01.591770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:01.592036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 
5000001 2024-11-21T23:09:01.593116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:01.593251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:01.593303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:01.593550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:01.593615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:01.593775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:01.593867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:01.602482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:01.602557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:01.602768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:01.602816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:01.603205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:01.603256Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:01.603360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:01.603396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:01.603446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:01.603513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:01.603576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:01.603607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:01.603681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:01.603721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:01.603758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
CompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 
MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:15.977553Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:13:15.977937Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 433us result status StatusSuccess 2024-11-21T23:13:15.979048Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 
} Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:15.990371Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:817:2653] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:13:15.990531Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:756:2653] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T23:13:15.990725Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:817:2653] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732230795956201 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732230795956201 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732230795956201 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:13:15.995568Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:817:2653] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T23:13:15.995717Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:756:2653] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 32648, MsgBus: 11930 2024-11-21T23:12:39.863846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874460028787180:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:39.863902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00111e/r3tmp/tmpSB1GuN/pdisk_1.dat 2024-11-21T23:12:40.574681Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:40.577510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:40.577571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:40.580811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32648, node 1 2024-11-21T23:12:40.824866Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:40.824892Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:40.824902Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:40.824976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11930 TClient is connected to server localhost:11930 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:42.390428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.413873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.612564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.879339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.990069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:44.811310Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874460028787180:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:44.811374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:46.607322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874490093559852:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:46.607448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:47.320754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.400420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.448739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.506638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.557822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.684879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.894679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874494388527670:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:47.894743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:47.894921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874494388527675:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:47.898765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:47.912156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874494388527677:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:55.555434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:12:55.555486Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:58.212117Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439874541633168804:2576], TxId: 281474976710678, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OGMxNmYyYjctZmVjYzFlZTktNzVmYTYyNDUtZTg0Y2IwOWU=. TraceId : 01jd8g032h083q814btchyr8xy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1732230770000/18446744073709551615 shard 72075186224037888 with lowWatermark v1732230770172/18446744073709551615 (node# 1 state# Ready) } } 2024-11-21T23:12:58.212716Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439874541633168804:2576], TxId: 281474976710678, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OGMxNmYyYjctZmVjYzFlZTktNzVmYTYyNDUtZTg0Y2IwOWU=. TraceId : 01jd8g032h083q814btchyr8xy. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1732230770000/18446744073709551615 shard 72075186224037888 with lowWatermark v1732230770172/18446744073709551615 (node# 1 state# Ready) } }. 2024-11-21T23:12:58.214562Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439874541633168805:2577], TxId: 281474976710678, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OGMxNmYyYjctZmVjYzFlZTktNzVmYTYyNDUtZTg0Y2IwOWU=. TraceId : 01jd8g032h083q814btchyr8xy. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439874541633168800:2470], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T23:12:58.223126Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGMxNmYyYjctZmVjYzFlZTktNzVmYTYyNDUtZTg0Y2IwOWU=, ActorId: [1:7439874502978462646:2470], ActorState: ExecuteState, TraceId: 01jd8g032h083q814btchyr8xy, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 13963, MsgBus: 16770 2024-11-21T23:12:59.535145Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874545854365407:2138];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00111e/r3tmp/tmpe56iHG/pdisk_1.dat 2024-11-21T23:12:59.620399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:59.835379Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:59.858016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:59.858095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:59.864241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13963, node 2 2024-11-21T23:13:00.059033Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:13:00.059056Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:13:00.059069Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:13:00.059179Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16770 TClient is connected to server localhost:16770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:13:01.104551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:01.115321Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:13:01.127506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:13:01.248849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:01.541151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:01.658075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:04.519318Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874545854365407:2138];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:04.519398Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:13:05.063994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874571624170786:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.064127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.120932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.169254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.229578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.272410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.319727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.382324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.492537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874571624171287:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.492638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.492910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874571624171292:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:05.498987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:13:05.512080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874571624171294:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:13:14.711118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:13:14.711144Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:16.489984Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439874618868812390:2582], TxId: 281474976715678, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZTczZjcyOTYtMjViZDYxZmQtNmUyZTkzNTUtNjA0NTIxZWQ=. CustomerSuppliedId : . TraceId : 01jd8g0mt1eq533yzakkaxtnmm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1732230787000/18446744073709551615 shard 72075186224037888 with lowWatermark v1732230787420/18446744073709551615 (node# 2 state# Ready) } } 2024-11-21T23:13:16.490070Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439874618868812390:2582], TxId: 281474976715678, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZTczZjcyOTYtMjViZDYxZmQtNmUyZTkzNTUtNjA0NTIxZWQ=. CustomerSuppliedId : . TraceId : 01jd8g0mt1eq533yzakkaxtnmm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1732230787000/18446744073709551615 shard 72075186224037888 with lowWatermark v1732230787420/18446744073709551615 (node# 2 state# Ready) } }. 2024-11-21T23:13:16.490989Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439874618868812391:2583], TxId: 281474976715678, task: 2. Ctx: { TraceId : 01jd8g0mt1eq533yzakkaxtnmm. SessionId : ydb://session/3?node_id=2&id=ZTczZjcyOTYtMjViZDYxZmQtNmUyZTkzNTUtNjA0NTIxZWQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439874618868812386:2465], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T23:13:16.492453Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTczZjcyOTYtMjViZDYxZmQtNmUyZTkzNTUtNjA0NTIxZWQ=, ActorId: [2:7439874575919138923:2465], ActorState: ExecuteState, TraceId: 01jd8g0mt1eq533yzakkaxtnmm, Create QueryResponse for error on request, msg: |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> KqpUserConstraint::KqpReadNull-UploadNull |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] Test command err: 2024-11-21T23:10:29.663390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873903634599143:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:29.663454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:29.903187Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bff/r3tmp/tmpkO4eTx/pdisk_1.dat 2024-11-21T23:10:30.294479Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22515, node 1 2024-11-21T23:10:30.413401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:30.413502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:30.415705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:30.422976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002bff/r3tmp/yandexgArgyY.tmp 2024-11-21T23:10:30.422999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002bff/r3tmp/yandexgArgyY.tmp 2024-11-21T23:10:30.423132Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002bff/r3tmp/yandexgArgyY.tmp 2024-11-21T23:10:30.423231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:30.466318Z INFO: TTestServer started on Port 31468 GrpcPort 22515 TClient is connected to server localhost:31468 PQClient connected to localhost:22515 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:30.950008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:10:31.000642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:10:31.203535Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:10:33.339383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873920814468884:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.339551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.339923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873920814468905:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.350373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:33.390296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873920814468936:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.403321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.404564Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T23:10:33.408871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873920814468907:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:33.827699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:33.836897Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873920814468974:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:33.860307Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjQ0NjgyNjktMTIxMTcxZTYtNzQ0YzJjNmYtNDYxMjhlMmY=, ActorId: [1:7439873920814468866:2306], ActorState: ExecuteState, TraceId: 01jd8fvnv962hwqpa82fvrdmbk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:33.872222Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:33.903151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:34.042235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873925109436553:2623] 2024-11-21T23:10:34.664372Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873903634599143:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:34.664455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:40.516133Z :WriteRead INFO: TTopicSdkTestSetup started 2024-11-21T23:10:40.543279Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:40.558767Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873950879240658:2814] connected; active server actors: 1 2024-11-21T23:10:40.559326Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T23:10:40.559715Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:40.559823Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:40.560290Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:40.578896Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:40.580245Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:40.580492Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:40.580685Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:40.580713Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:40.580736Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:40.580750Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:40.580777Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:40.580837Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:40.580861Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:40.581021Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:40.581068Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:40.581199Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873950879240675:2436], now have 1 active actors on pipe 2024-11-21T23:10:40.604225Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:40.604325Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873950879240657:2813], now have 1 active actors on pipe 2024-11-21T23:10:40.606108Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T23:10:40.628345Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439873907929566639:2198] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 ... 
eHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2024-11-21T23:13:16.196136Z node 10 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [10:7439874621498991067:2449] (SourceId=test-message_group_id, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2024-11-21T23:13:16.196170Z node 10 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2024-11-21T23:13:16.206630Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 10, Generation: 1 2024-11-21T23:13:16.206687Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:13:16.206745Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [10:7439874621498991070:2449], now have 1 active actors on pipe 2024-11-21T23:13:16.206792Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T23:13:16.206832Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T23:13:16.207001Z node 10 :PERSQUEUE INFO: new Cookie test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0 generated for partition 0 topic 'test-topic' owner test-message_group_id 2024-11-21T23:13:16.207118Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:13:16.207189Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:13:16.208782Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T23:13:16.208841Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T23:13:16.208974Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:13:16.209109Z node 10 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0 2024-11-21T23:13:16.210906Z :DEBUG: [/Root] SessionId [test-message_group_id|acf6b6ea-158ca3ee-2cb9821b-2a5794ec_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:16.211059Z :INFO: [/Root] SessionId [test-message_group_id|acf6b6ea-158ca3ee-2cb9821b-2a5794ec_0] PartitionId [0] Generation [1] Write session established. 
Init response: session_id: "test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0" 2024-11-21T23:13:16.211109Z :TRACE: [/Root] TRACE_EVENT InitResponse partition_id=0 session_id=test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0 2024-11-21T23:13:16.211172Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: set DirectWriteToPartitionId 0 2024-11-21T23:13:16.211518Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write 1 messages with Id from 1 to 1 2024-11-21T23:13:16.211642Z :INFO: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: close. Timeout 18446744073709.551615s 2024-11-21T23:13:16.212171Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T23:13:16.212219Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 1 2024-11-21T23:13:16.214886Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T23:13:16.215318Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T23:13:16.215759Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T23:13:16.215812Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T23:13:16.215942Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T23:13:16.216022Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T23:13:16.216405Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T23:13:16.216432Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T23:13:16.216529Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: test-topic partition: 0 SourceId: '\0test-message_group_id' SeqNo: 1 partNo : 0 messageNo: 1 size 98 offset: -1 2024-11-21T23:13:16.216829Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 1 partNo 0 2024-11-21T23:13:16.217838Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 187 count 1 nextOffset 1 batches 1 2024-11-21T23:13:16.218995Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 175 WTime 1732230796217 2024-11-21T23:13:16.219320Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T23:13:16.231891Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 120 2024-11-21T23:13:16.231976Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:16.232064Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T23:13:16.232348Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:13:16.232400Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T23:13:16.232512Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T23:13:16.232677Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T23:13:16.233150Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'test-topic' partition 0 user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T23:13:16.233199Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T23:13:16.233259Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T23:13:16.233286Z node 10 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T23:13:16.233372Z node 10 :PERSQUEUE DEBUG: Topic 'test-topic' partition 0 user test-consumer readTimeStamp done, result 1732230796216 queuesize 0 startOffset 0 2024-11-21T23:13:16.251090Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:16.251327Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 13000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:16.251379Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] OnAck: seqNo=1, txId=? 
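The write path traced above follows the public TopicService flow: the partition chooser pins the producer id to partition 0, the Init response returns a session id, a single message with SeqNo 1 is written, and the partition persists it at offset 0 before the client receives the ack with write statistics. A minimal client-side sketch of the same flow, assuming the YDB Python SDK topic API (driver.topic_client.writer, TopicWriterMessage); the endpoint, database and topic below are placeholders and exact names may differ between SDK versions:

    import ydb

    # Placeholder connection settings; not taken from the test environment above.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)

    # producer_id corresponds to the SourceId / message_group_id seen in the log;
    # the server registers an owner cookie for it when the session is initialized.
    with driver.topic_client.writer("test-topic", producer_id="test-message_group_id") as writer:
        # One message; the SDK assigns SeqNo 1, mirroring "Write 1 messages with Id from 1 to 1".
        writer.write(ydb.TopicWriterMessage(data=b"hello"))
        # flush() returns once the server acknowledges the write, like the
        # "acks { seq_no: 1 written { } }" response in the trace.
        writer.flush()
    # Leaving the block closes the session, matching the "Write session: close" / "destroy" lines.

The per-message timings in the ack (persisting_time, min/max queue wait times) are the same fields that appear in the write_statistics block of the log above.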
2024-11-21T23:13:16.251428Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: acknoledged message 1 2024-11-21T23:13:16.313062Z :INFO: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T23:13:16.313142Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T23:13:16.313782Z :INFO: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T23:13:16.314526Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T23:13:16.314625Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2024-11-21T23:13:16.314692Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2024-11-21T23:13:16.316246Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0 grpc read done: success: 0 data: 2024-11-21T23:13:16.316292Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0 grpc read failed 2024-11-21T23:13:16.316350Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0 grpc closed 2024-11-21T23:13:16.316409Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0 is DEAD 2024-11-21T23:13:16.330655Z :DEBUG: [/Root] SessionId [test-message_group_id|e0cbec06-62be76bd-6f86d0b0-b3fc128_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T23:13:16.318054Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:13:16.318696Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:16.318753Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [10:7439874621498991070:2449] destroyed 2024-11-21T23:13:16.318821Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TGRpcStreamingTest::SimpleEcho |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2024-11-21T23:13:15.129486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:13:15.132122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:13:15.132241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0021cb/r3tmp/tmp9v01h7/pdisk_1.dat 2024-11-21T23:13:15.545096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:13:15.639354Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:15.713909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:15.714106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:15.727683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:15.874612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:13:16.810530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:16.810676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:832:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:16.810740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:16.825420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:13:17.059399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:836:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:13:17.803572Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8g0ng80y8apfqvftx4yadk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhkZmY3MWEtMTFhYmUzOC0yYTMxMDljMy1kYjM1MmFmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:13:17.810633Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:950:2756], TxId: 281474976715660, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODhkZmY3MWEtMTFhYmUzOC0yYTMxMDljMy1kYjM1MmFmNQ==. TraceId : 01jd8g0ng80y8apfqvftx4yadk. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2024-11-21T23:13:17.813304Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:950:2756], TxId: 281474976715660, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODhkZmY3MWEtMTFhYmUzOC0yYTMxMDljMy1kYjM1MmFmNQ==. TraceId : 01jd8g0ng80y8apfqvftx4yadk. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2024-11-21T23:13:17.840840Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:951:2757], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01jd8g0ng80y8apfqvftx4yadk. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ODhkZmY3MWEtMTFhYmUzOC0yYTMxMDljMy1kYjM1MmFmNQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T23:13:17.863663Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODhkZmY3MWEtMTFhYmUzOC0yYTMxMDljMy1kYjM1MmFmNQ==, ActorId: [1:820:2673], ActorState: ExecuteState, TraceId: 01jd8g0ng80y8apfqvftx4yadk, Create QueryResponse for error on request, msg: 2024-11-21T23:13:17.864829Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8g0ng80y8apfqvftx4yadk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhkZmY3MWEtMTFhYmUzOC0yYTMxMDljMy1kYjM1MmFmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |84.0%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TxUsage::WriteToTopic_Demo_25 [GOOD] >> TGRpcStreamingTest::WritesDoneFromClient >> KqpSinkTx::OlapInvalidateOnError [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> TGRpcStreamingTest::ReadFinish >> TS3WrapperTests::UploadUnknownPart >> TGRpcStreamingTest::ClientNeverWrites |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> LocalPartition::WithoutPartitionWithSplit [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2024-11-21T23:13:21.635829Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4CB44B11-7E59-453D-8F4F-B7159E2D8907, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:63729 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8DA1B640-C01C-4CD3-AA7A-AC1EFFE133D5 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2024-11-21T23:13:21.644823Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4CB44B11-7E59-453D-8F4F-B7159E2D8907, response# |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TSettingsValidation::TestDifferentDedupParams >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> TS3WrapperTests::PutObject >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 62876, MsgBus: 21360 2024-11-21T23:12:29.787291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874416544553430:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:29.787342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001199/r3tmp/tmp2Itj4g/pdisk_1.dat 2024-11-21T23:12:30.363034Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.413599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:30.422652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:30.435037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62876, node 1 2024-11-21T23:12:30.600458Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:30.600478Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:30.600488Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:30.602331Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21360 TClient is connected to server localhost:21360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:31.481699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:31.499114Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:34.199161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874438019390381:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:34.199337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:34.199735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874438019390417:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:34.204583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:12:34.219171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874438019390419:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:12:34.677940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:34.790840Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874416544553430:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:34.790905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:34.843929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.844144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:34.844440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:34.844591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:34.844717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:34.844859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:34.844986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:34.845098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:34.845222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:34.845337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:34.845445Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:34.845545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7439874438019390671:2324];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:34.866896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.866963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:34.867127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:34.867210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:34.867288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:34.867374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:34.867448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:34.867534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:34.867644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:34.867762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:34.867843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:34.867921Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7439874438019390664:2323];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:34.879212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439874438019390658:2317];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.879271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7439874438019390658:2317];tablet_id=72075186224037896;proce ... MNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7439874583645427881:3074];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.533360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7439874583645427794:3059];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.533485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7439874583645427758:3045];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.533610Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7439874583645427773:3053];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.533745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[2:7439874583645427476:2992];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038089;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.533867Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7439874579350459268:2871];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.534097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7439874583645427717:3037];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.548344Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7439874583645427650:3020];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038056;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.548632Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7439874583645427717:3037];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.548829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7439874583645427733:3038];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.549464Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038056;self_id=[2:7439874583645427650:3020];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.549627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7439874583645427733:3038];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.558674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7439874583645427524:3003];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038049;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.559019Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7439874583645427423:2966];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.559213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[2:7439874562170586955:2479];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037915;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.562676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7439874583645427524:3003];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.562924Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7439874583645427423:2966];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.563088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;self_id=[2:7439874562170586955:2479];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037915;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.567958Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7439874583645427764:3050];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.568283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7439874583645427833:3072];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.568592Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7439874553580651079:2319];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.578835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7439874583645427764:3050];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.579075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7439874583645427833:3072];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.579210Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[2:7439874553580651079:2319];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.579422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7439874583645427752:3043];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.579916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7439874583645427752:3043];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.608440Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7439874583645427656:3023];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.634485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7439874583645427656:3023];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.773199Z node 2 :TX_COLUMNSHARD WARN: tablet_id=7;fline=restore.cpp:23;event=merge_data_problems;write_id=2;tablet_id=7;message=Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]}; 2024-11-21T23:13:13.773491Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224037993;self_id=[2:7439874557875619383:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037993;event=TEvWriteBlobsResult;fline=events.h:97;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]};tx_id=281474976710665; 2024-11-21T23:13:13.773734Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439874557875619383:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037993;event=TEvWriteBlobsResult;tablet_id=72075186224037993;local_tx_no=11;tx_info=TTxWrite;tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:13.773932Z node 2 :TX_COLUMNSHARD_SCAN WARN: Scan [2:7439874605120267703:3677] got AbortExecution txId: 281474976710665 scanId: 1 gen: 1 tablet: 72075186224037993 code: ABORTED reason: {
: Error: task finished: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } 2024-11-21T23:13:13.774539Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439874605120267699:3674], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [0:0:0]Got BAD REQUEST for table `[OwnerId: 72057594046644480, LocalPathId: 7]`. ShardID=72075186224037993, Sink=[2:7439874605120267699:3674].{
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } 2024-11-21T23:13:13.774655Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439874605120267696:3674], TxId: 281474976710665, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd8g0hcv0y23mc6hw3a36kf4. SessionId : ydb://session/3?node_id=2&id=ZDBlMGRjMzItYWIzMjYzZTYtZTNlZDdmZDctZTQzZWFjNDQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Sink[0] fatal error: {
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } subissue: {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } } 2024-11-21T23:13:13.774717Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439874605120267696:3674], TxId: 281474976710665, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd8g0hcv0y23mc6hw3a36kf4. SessionId : ydb://session/3?node_id=2&id=ZDBlMGRjMzItYWIzMjYzZTYtZTNlZDdmZDctZTQzZWFjNDQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST DEFAULT_ERROR: {
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } subissue: {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} } }. 2024-11-21T23:13:13.775966Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDBlMGRjMzItYWIzMjYzZTYtZTNlZDdmZDctZTQzZWFjNDQ=, ActorId: [2:7439874600825300313:3644], ActorState: ExecuteState, TraceId: 01jd8g0hcv0y23mc6hw3a36kf4, Create QueryResponse for error on request, msg:
: Fatal: Bad request. Table `/Root/KV`. {
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} }
: Fatal: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32 not null"]} 2024-11-21T23:13:13.945649Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDBlMGRjMzItYWIzMjYzZTYtZTNlZDdmZDctZTQzZWFjNDQ=, ActorId: [2:7439874600825300313:3644], ActorState: ExecuteState, TraceId: 01jd8g0jhv104zd7rnm805tq7m, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jd8g0hchcw9qz59fgj3v52v3, code: 2015 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TS3WrapperTests::CopyPartUpload >> TS3WrapperTests::PutObject [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject >> KqpSinkTx::OlapInteractive [GOOD] >> TS3WrapperTests::HeadObject [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> TS3WrapperTests::CopyPartUpload [GOOD] >> TGRpcStreamingTest::SimpleEcho [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2024-11-21T23:13:22.888917Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D349B3D8-2076-4D51-B466-D9A64B5BC9C1, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:5448 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C3CDC45F-1D43-42B1-B3AA-267742CB05AC amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T23:13:22.894770Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D349B3D8-2076-4D51-B466-D9A64B5BC9C1, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } >> TS3WrapperTests::GetObject >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2024-11-21T23:13:23.160484Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0CE2498E-2C72-4334-9ADF-9906414A8C2B, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:20996 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B9963A72-3764-4F3B-9A30-551AB081E6F7 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T23:13:23.176406Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0CE2498E-2C72-4334-9ADF-9906414A8C2B, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T23:13:23.176913Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0A10222D-B45E-4CD2-B48F-DBDD018D7E15, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:20996 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 53E697C3-A3A2-462E-8D8D-DE3330C1F351 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T23:13:23.182077Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 0A10222D-B45E-4CD2-B48F-DBDD018D7E15, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 
2024-11-21T23:13:23.081764Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A1C071C9-544B-46C6-88BA-50F821E43A81, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:4008 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9D28AE7C-E1D8-42BA-9F28-B28CD75FEC96 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T23:13:23.091873Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A1C071C9-544B-46C6-88BA-50F821E43A81, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T23:13:23.098799Z node 1 :S3_WRAPPER NOTICE: Request: uuid# DA3A1D61-0467-45EC-AEC9-8BFCF5846477, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:4008 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8168E6AE-63ED-4614-8366-BDECB5957F0F amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2024-11-21T23:13:23.103524Z node 1 :S3_WRAPPER NOTICE: Response: uuid# DA3A1D61-0467-45EC-AEC9-8BFCF5846477, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2024-11-21T23:13:23.104202Z node 1 :S3_WRAPPER NOTICE: Request: uuid# DB7C0C70-D20D-4468-A72E-0D0213050D48, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:4008 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A07708A6-0AC1-4F8C-A78D-7F0F33E12EF3 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2024-11-21T23:13:23.122581Z node 1 :S3_WRAPPER NOTICE: Response: uuid# DB7C0C70-D20D-4468-A72E-0D0213050D48, response# UploadPartCopyResult { } 2024-11-21T23:13:23.123414Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 2DF971A4-AA8C-46EF-95AC-7643585F7F98, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:4008 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E5C6B81C-2762-4A1C-B851-3D8CCBB51AC6 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2024-11-21T23:13:23.135195Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 2DF971A4-AA8C-46EF-95AC-7643585F7F98, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2024-11-21T23:13:23.136741Z node 1 :S3_WRAPPER NOTICE: 
Request: uuid# 298239CA-85F7-481D-A1F1-0F9C4A67BB4F, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:4008 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6D3724C2-9870-4201-8B32-2478964DEE75 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2024-11-21T23:13:23.159126Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 298239CA-85F7-481D-A1F1-0F9C4A67BB4F, response# GetObjectResult { } >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> TS3WrapperTests::GetObject [GOOD] |84.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |84.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 27865, MsgBus: 29687 2024-11-21T23:12:39.873347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874460069870968:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:39.873544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001148/r3tmp/tmpVIoDat/pdisk_1.dat 2024-11-21T23:12:40.511223Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:40.549109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:40.549204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:40.555481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27865, node 1 2024-11-21T23:12:40.758899Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:40.758920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:40.758926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:40.759007Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29687 TClient is connected to server localhost:29687 WaitRootIsUp 'Root'... 
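The TS3WrapperTests::CopyPartUpload trace above is the standard S3 server-side copy sequence run against the mock endpoint: PutObject for the source, CreateMultipartUpload (POST ?uploads), UploadPartCopy with x-amz-copy-source-range: bytes=1-2, CompleteMultipartUpload carrying the part ETag, and a ranged GetObject to check the result. A rough boto3 equivalent of the same request sequence against any S3-compatible endpoint (the endpoint URL, credentials and bucket below are placeholders, not values from the test):

    import boto3

    # Placeholder endpoint and credentials for an S3-compatible service.
    s3 = boto3.client(
        "s3",
        endpoint_url="http://localhost:4008",
        aws_access_key_id="dummy",
        aws_secret_access_key="dummy",
    )

    bucket = "TEST"
    s3.put_object(Bucket=bucket, Key="key", Body=b"data")        # source object

    mpu = s3.create_multipart_upload(Bucket=bucket, Key="key1")  # POST ?uploads
    part = s3.upload_part_copy(                                  # PUT ?partNumber=1&uploadId=...
        Bucket=bucket,
        Key="key1",
        UploadId=mpu["UploadId"],
        PartNumber=1,
        CopySource={"Bucket": bucket, "Key": "key"},
        CopySourceRange="bytes=1-2",                             # copy two bytes of the source
    )
    s3.complete_multipart_upload(                                # POST ?uploadId=...
        Bucket=bucket,
        Key="key1",
        UploadId=mpu["UploadId"],
        MultipartUpload={"Parts": [{"PartNumber": 1,
                                    "ETag": part["CopyPartResult"]["ETag"]}]},
    )
    body = s3.get_object(Bucket=bucket, Key="key1", Range="bytes=0-1")["Body"].read()

CompleteMultipartUpload must echo the ETag returned by UploadPartCopy, which is exactly what the wrapper does in the trace (Parts: [afc7e8a98f75755e513d9d5ead888e1d]).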
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:42.160124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:42.203289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:44.869413Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874460069870968:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:44.869513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:45.779579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874485839675229:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.779751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.780144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874485839675256:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.886360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:12:45.992024Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:12:45.992301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874485839675258:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:12:46.747443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:47.146215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:12:50.407303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T23:12:55.505544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:12:55.505582Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 30247, MsgBus: 23107 2024-11-21T23:12:59.632918Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874548530068609:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:59.632978Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001148/r3tmp/tmp7GX47b/pdisk_1.dat 2024-11-21T23:12:59.767168Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:59.786417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:59.786505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:59.788401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30247, node 2 2024-11-21T23:12:59.951066Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:59.951089Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:59.951110Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:59.951239Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23107 TClient is connected to server localhost:23107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:13:00.559753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:00.567200Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:13:03.279852Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874565709938408:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:03.279978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:03.280325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874565709938420:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:03.284746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:13:03.297815Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2024-11-21T23:13:03.298015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874565709938422:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:13:03.423635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:13:03.557594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439874565709938624:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:13:03.557853Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439874565709938624:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:13:03.558211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439874565709938624:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:13:03.558342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439874565709938624:2316];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:13:03.558468Z ... :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7439874591479748004:3143];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.875568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7439874591479747675:3031];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.875766Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7439874591479747861:3113];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.876541Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7439874591479747728:3067];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.876741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7439874591479747719:3062];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.876943Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7439874591479747910:3127];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.877801Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7439874591479747686:3039];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.879358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[2:7439874591479748022:3148];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038083;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.881267Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7439874591479747742:3081];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038002;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.881614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7439874591479747729:3068];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.883406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7439874591479747708:3051];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.883603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7439874591479748007:3144];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.883735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7439874591479747678:3034];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.884031Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7439874591479748017:3147];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.884505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7439874591479747751:3089];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.884976Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7439874591479747740:3079];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885077Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7439874591479747739:3078];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7439874591479747887:3122];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885223Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7439874591479747855:3111];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885302Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7439874591479747769:3101];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885363Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7439874591479748063:3149];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885431Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038036;self_id=[2:7439874591479747703:3046];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885495Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7439874591479747889:3123];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7439874591479747734:3073];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7439874591479747737:3076];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.885870Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7439874591479747720:3063];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.886003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7439874591479747759:3094];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.886226Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7439874591479747775:3104];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.887119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7439874591479747735:3074];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.887269Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7439874591479747699:3042];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.887391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7439874591479747709:3052];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.887533Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[2:7439874591479747922:3131];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.887692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7439874591479747864:3116];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.887868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7439874591479747884:3120];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.888029Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038065;self_id=[2:7439874591479747597:3027];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.888154Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7439874591479748004:3143];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.888266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7439874591479747686:3039];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.888418Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7439874591479747731:3070];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.889028Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7439874591479747744:3083];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.889162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7439874591479747886:3121];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.889290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7439874591479747704:3047];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.889378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[2:7439874591479748014:3146];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038087;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.889597Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7439874591479747900:3125];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:15.893141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7439874591479747884:3120];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2024-11-21T23:13:20.278437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874635398242112:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:20.279062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001297/r3tmp/tmpQnYtmn/pdisk_1.dat 2024-11-21T23:13:20.699250Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:20.724512Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:20.724600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:20.739871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:20.788669Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream accepted Name# Session ok# true peer# ipv6:[::1]:46670 2024-11-21T23:13:20.788940Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade attach Name# Session actor# [1:7439874635398242484:2250] peer# ipv6:[::1]:46670 2024-11-21T23:13:20.788973Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade read Name# Session peer# ipv6:[::1]:46670 2024-11-21T23:13:20.789319Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] read finished Name# Session ok# true data# peer# ipv6:[::1]:46670 2024-11-21T23:13:20.789407Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2024-11-21T23:13:20.789453Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade write Name# Session data# peer# ipv6:[::1]:46670 2024-11-21T23:13:20.789672Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade finish Name# Session peer# ipv6:[::1]:46670 grpc status# (0) message# 2024-11-21T23:13:20.789907Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] write finished Name# Session ok# true peer# ipv6:[::1]:46670 2024-11-21T23:13:20.790166Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream done notification Name# Session ok# true peer# ipv6:[::1]:46670 2024-11-21T23:13:20.790260Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream finished Name# Session ok# true peer# ipv6:[::1]:46670 grpc status# (0) message# 2024-11-21T23:13:20.790305Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] deregistering request Name# Session peer# ipv6:[::1]:46670 (finish done) ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2024-11-21T23:13:24.048716Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 3BAAC808-9869-4515-B745-41EC9EC7E569, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:14521 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 050163E1-54A5-410B-B547-D9C4C42AD4BE amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2024-11-21T23:13:24.057099Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 3BAAC808-9869-4515-B745-41EC9EC7E569, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T23:13:24.058630Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 6896DAB4-B013-45AB-903A-83C058C90CCA, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:14521 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3EF9AF20-9A1B-4CB2-8FD2-02F399E2BCFD amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T23:13:24.063694Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 6896DAB4-B013-45AB-903A-83C058C90CCA, response# GetObjectResult { } 
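The TS3WrapperTests::GetObject block above exercises a plain S3-style flow against the local mock server: a 4-byte PutObject (with a base64 content-md5) followed by a ranged GetObject using Range: bytes=0-3. A minimal sketch of an equivalent ranged read with libcurl is shown below; the endpoint, bucket and key mirror the log output, but the port is ephemeral per run, so the snippet is purely illustrative and is not part of the test suite.

#include <curl/curl.h>
#include <iostream>
#include <string>

// Append each chunk of the response body into a std::string.
static size_t CollectBody(char* data, size_t size, size_t nmemb, void* userp) {
    static_cast<std::string*>(userp)->append(data, size * nmemb);
    return size * nmemb;
}

int main() {
    curl_global_init(CURL_GLOBAL_DEFAULT);
    CURL* curl = curl_easy_init();
    if (!curl) return 1;

    std::string body;
    // Endpoint/bucket/key taken from the log above; the port (14521) is an
    // ephemeral test port and will differ between runs (assumption).
    curl_easy_setopt(curl, CURLOPT_URL, "http://localhost:14521/TEST/key");
    curl_easy_setopt(curl, CURLOPT_RANGE, "0-3");  // first four bytes, as in the ranged GET in the log
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, CollectBody);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &body);

    CURLcode rc = curl_easy_perform(curl);
    if (rc == CURLE_OK) {
        std::cout << "read " << body.size() << " bytes" << std::endl;
    } else {
        std::cerr << "request failed: " << curl_easy_strerror(rc) << std::endl;
    }

    curl_easy_cleanup(curl);
    curl_global_cleanup();
    return rc == CURLE_OK ? 0 : 1;
}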
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> TS3WrapperTests::HeadUnknownObject >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2024-11-21T23:13:20.855715Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874636502547220:2214];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:20.856449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001252/r3tmp/tmpv1Ehon/pdisk_1.dat 2024-11-21T23:13:21.382527Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:21.384678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:21.384761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:21.389175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:21.464983Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream accepted Name# Session ok# true peer# ipv6:[::1]:47002 2024-11-21T23:13:21.465340Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade attach Name# Session actor# [1:7439874640797514851:2250] peer# ipv6:[::1]:47002 2024-11-21T23:13:21.465370Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade read Name# Session peer# ipv6:[::1]:47002 2024-11-21T23:13:21.465581Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] read finished Name# Session ok# false data# peer# ipv6:[::1]:47002 2024-11-21T23:13:21.465680Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2024-11-21T23:13:21.465735Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade finish Name# Session peer# ipv6:[::1]:47002 grpc status# (9) message# Everything is A-OK 2024-11-21T23:13:21.466467Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream done notification Name# Session ok# true peer# ipv6:[::1]:47002 2024-11-21T23:13:21.466546Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream finished Name# Session ok# true peer# ipv6:[::1]:47002 grpc status# (9) message# Everything is A-OK 2024-11-21T23:13:21.466603Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] deregistering request Name# Session peer# ipv6:[::1]:47002 (finish done) 2024-11-21T23:13:21.467277Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone >> TGRpcStreamingTest::ReadFinish [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 16708, MsgBus: 27284 2024-11-21T23:12:29.791716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874420337852568:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:29.791837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001184/r3tmp/tmpf5S7Mq/pdisk_1.dat 
2024-11-21T23:12:30.520568Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.523113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:30.523196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:30.529393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16708, node 1 2024-11-21T23:12:30.782914Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:30.782934Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:30.782939Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:30.783021Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27284 TClient is connected to server localhost:27284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:31.500009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:33.828508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874437517722248:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.828652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.828750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874437517722256:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.833942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:12:33.848680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874437517722262:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:12:34.498699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:34.686275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.691455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:34.691819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:34.691949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:34.692069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:34.692172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:34.692266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:34.692369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:34.692474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:34.692571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:34.692727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:34.692908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874441812689796:2317];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:34.731575Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.731640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:34.731873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:34.731979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:34.732079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:34.732176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:34.732263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:34.732363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:34.732465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:34.732586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:34.732685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:34.732780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441812689805:2322];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:34.760444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439874441812689797:2318];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.760515Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7439874441812689797:2318];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:34.760715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439874441812689797:2318];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:34.760807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7439874441812689797:2318];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstr ... :16.560508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7439874576393288162:3032];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.560658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[2:7439874559213415386:2476];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037919;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.560795Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[2:7439874559213415427:2500];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037909;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.560931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7439874576393288190:3056];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.561066Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7439874576393288133:3005];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.561365Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7439874576393287964:2939];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.561738Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[2:7439874554918447791:2404];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037967;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.561886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7439874576393288023:2978];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.562351Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7439874576393288166:3035];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.563573Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7439874576393288173:3042];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.565507Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037999;self_id=[2:7439874576393287971:2946];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037999;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.565749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7439874576393288153:3023];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.566041Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7439874576393288160:3030];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.566212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7439874576393288185:3052];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.566787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7439874576393288203:3060];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.566964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7439874576393288274:3064];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.567103Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037914;self_id=[2:7439874559213415523:2516];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037914;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.567235Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037940;self_id=[2:7439874559213415380:2472];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037940;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.567240Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7439874559213415458:2509];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037918;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.567376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037935;self_id=[2:7439874559213415413:2493];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037935;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.567402Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7439874576393288206:3062];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.567514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7439874554918446637:2325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.567546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;self_id=[2:7439874554918448022:2462];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037957;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.568037Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038058;self_id=[2:7439874576393288137:3009];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.603169Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7439874576393288128:3000];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.603490Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7439874576393288177:3045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.603698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7439874576393288129:3001];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.603892Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439874554918447358:2377];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037993;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.605257Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[2:7439874554918447951:2430];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037958;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.605518Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[2:7439874576393288020:2975];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038083;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.605703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7439874576393288083:2994];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.605886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7439874576393288033:2981];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.607232Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[2:7439874576393288128:3000];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.607457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7439874576393288177:3045];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.607657Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7439874576393288129:3001];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.607810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439874554918447358:2377];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037993;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.607951Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[2:7439874554918447951:2430];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037958;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T23:13:16.608098Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[2:7439874576393288020:2975];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038083;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.608244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7439874576393288083:2994];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.608387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7439874576393288033:2981];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.608648Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[2:7439874554918447769:2396];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037983;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.608890Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037906;self_id=[2:7439874559213415454:2508];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037906;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.609106Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;self_id=[2:7439874559213415532:2517];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037908;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.640553Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=3082c580-a85e11ef-bfefb130-a32459b8;fline=with_appended.cpp:80;portions=3,;task_id=3082c580-a85e11ef-bfefb130-a32459b8; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TS3WrapperTests::HeadUnknownObject [GOOD] >> TS3WrapperTests::AbortMultipartUpload >> TS3WrapperTests::MultipartUpload >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_25 [GOOD] Test command err: 2024-11-21T23:10:34.314598Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873925285175026:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:34.321232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:34.605250Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002be6/r3tmp/tmp5wu5pL/pdisk_1.dat 2024-11-21T23:10:34.930172Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:34.949071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:34.949182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T23:10:34.951271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16482, node 1 2024-11-21T23:10:35.110692Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002be6/r3tmp/yandexqqKafi.tmp 2024-11-21T23:10:35.110714Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002be6/r3tmp/yandexqqKafi.tmp 2024-11-21T23:10:35.110870Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002be6/r3tmp/yandexqqKafi.tmp 2024-11-21T23:10:35.110976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:35.196944Z INFO: TTestServer started on Port 22911 GrpcPort 16482 TClient is connected to server localhost:22911 PQClient connected to localhost:16482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:35.627693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:35.642469Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:35.658541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:35.664158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:10:35.798640Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:35.811034Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:10:37.891351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873938170077558:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:37.891539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:37.891868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873938170077573:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:37.896917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:37.914976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873938170077575:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:38.332139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:38.374028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:38.374615Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873942465044944:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:38.375958Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjU4ZDcyY2QtZTgxMDA2MDEtY2I1MjNlN2YtNGU4YzRkNWU=, ActorId: [1:7439873938170077535:2306], ActorState: ExecuteState, TraceId: 01jd8fvt9t2kchetqmc194ps91, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:38.377866Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:38.477421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873942465045217:2619] 2024-11-21T23:10:39.308842Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873925285175026:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:39.308934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:44.995929Z :WriteToTopic_Demo_21_RestartNo INFO: TTopicSdkTestSetup started 2024-11-21T23:10:45.012703Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:45.033594Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873972529816621:2811] connected; active server actors: 1 2024-11-21T23:10:45.034239Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2024-11-21T23:10:45.034909Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:45.035063Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:45.036231Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:45.042480Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:45.042837Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:45.043441Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:45.043668Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:45.043747Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:45.043778Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:45.043818Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:45.043912Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:45.043947Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:45.043963Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:45.047192Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:45.047250Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:45.047318Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873972529816645:2438], now have 1 active actors on pipe 2024-11-21T23:10:45.076196Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:45.076240Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873972529816618:2810], now have 1 active actors on pipe 2024-11-21T23:10:45.098963Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439873925285175273 RawX2: 4294969476 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ... 
in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 2 2024-11-21T23:13:18.680944Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_5328613906770633023_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 3 2024-11-21T23:13:19.381249Z node 10 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:13:19.382354Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0 describe result for acl check 2024-11-21T23:13:19.476650Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_B:0:1:2:3 2024-11-21T23:13:19.476741Z :INFO: [/Root] [/Root] [cd56da2a-b31f1ee3-b8d9e81a-ef8a7097] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:13:19.582344Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_5328613906770633023_v1 checking auth because of timeout 2024-11-21T23:13:19.582426Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_5328613906770633023_v1 auth for : test-consumer 2024-11-21T23:13:19.584541Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_5328613906770633023_v1 Handle describe topics response 2024-11-21T23:13:19.584633Z node 10 :PQ_READ_PROXY DEBUG: session cookie 2 consumer test-consumer session test-consumer_10_2_5328613906770633023_v1 auth is DEAD 2024-11-21T23:13:19.584701Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_5328613906770633023_v1 auth ok: topics# 1, initDone# 1 2024-11-21T23:13:20.492822Z :INFO: [/Root] [/Root] [cd56da2a-b31f1ee3-b8d9e81a-ef8a7097] Closing read session. Close timeout: 0.000000s 2024-11-21T23:13:20.492880Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_B:0:1:2:3 2024-11-21T23:13:20.492930Z :INFO: [/Root] [/Root] [cd56da2a-b31f1ee3-b8d9e81a-ef8a7097] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2016 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:13:20.493054Z :NOTICE: [/Root] [/Root] [cd56da2a-b31f1ee3-b8d9e81a-ef8a7097] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2024-11-21T23:13:20.493103Z :DEBUG: [/Root] [/Root] [cd56da2a-b31f1ee3-b8d9e81a-ef8a7097] [] Abort session to cluster 2024-11-21T23:13:20.500490Z :NOTICE: [/Root] [/Root] [cd56da2a-b31f1ee3-b8d9e81a-ef8a7097] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Aborted " } 2024-11-21T23:13:20.504029Z :INFO: [/Root] [/Root] [2777a1ff-990f2b3d-19ce51d4-95659ddd] Closing read session. Close timeout: 0.000000s 2024-11-21T23:13:20.504095Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:2:0 2024-11-21T23:13:20.504134Z :INFO: [/Root] [/Root] [2777a1ff-990f2b3d-19ce51d4-95659ddd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 4182 BytesRead: 30 MessagesRead: 3 BytesReadCompressed: 30 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:13:20.504220Z :NOTICE: [/Root] [/Root] [2777a1ff-990f2b3d-19ce51d4-95659ddd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
<main>: Error: Close with zero timeout " } 2024-11-21T23:13:20.504258Z :DEBUG: [/Root] [/Root] [2777a1ff-990f2b3d-19ce51d4-95659ddd] [] Abort session to cluster 2024-11-21T23:13:20.504677Z :NOTICE: [/Root] [/Root] [2777a1ff-990f2b3d-19ce51d4-95659ddd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:13:20.505531Z :INFO: [/Root] SessionId [test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T23:13:20.505654Z :INFO: [/Root] SessionId [test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T23:13:20.505691Z :DEBUG: [/Root] SessionId [test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T23:13:20.514015Z :INFO: [/Root] SessionId [test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T23:13:20.514061Z :DEBUG: [/Root] SessionId [test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T23:13:20.516720Z :INFO: [/Root] SessionId [test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T23:13:20.516779Z :INFO: [/Root] SessionId [test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T23:13:20.516812Z :DEBUG: [/Root] SessionId [test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T23:13:20.517506Z :INFO: [/Root] SessionId [test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T23:13:20.517520Z :DEBUG: [/Root] SessionId [test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T23:13:20.517579Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2024-11-21T23:13:20.517607Z :DEBUG: [/Root] SessionId [test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2024-11-21T23:13:20.517653Z :DEBUG: [/Root] SessionId [test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T23:13:20.548657Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_8846558423510871749_v1 grpc read done: success# 0, data# { } 2024-11-21T23:13:20.548698Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_8846558423510871749_v1 grpc read failed 2024-11-21T23:13:20.548726Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_8846558423510871749_v1 grpc closed 2024-11-21T23:13:20.548763Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_8846558423510871749_v1 is DEAD 2024-11-21T23:13:20.550075Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_5328613906770633023_v1 grpc closed 2024-11-21T23:13:20.550117Z node 10 :PQ_READ_PROXY INFO: session cookie 2 consumer test-consumer session test-consumer_10_2_5328613906770633023_v1 is DEAD 2024-11-21T23:13:20.550906Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0 grpc read done: success: 0 data: 2024-11-21T23:13:20.550923Z node 10 
:PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0 grpc read failed 2024-11-21T23:13:20.550947Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0 grpc closed 2024-11-21T23:13:20.550963Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message_group_id|f039ab9f-76414f37-d86e706a-9ca0a984_0 is DEAD 2024-11-21T23:13:20.551668Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:13:20.551708Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:13:20.551789Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0 grpc closed 2024-11-21T23:13:20.551805Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|76121c01-97859682-8600f0c4-2f46286d_0 is DEAD 2024-11-21T23:13:20.552369Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:13:20.553654Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439874621016090223:2502] disconnected; active server actors: 1 2024-11-21T23:13:20.553676Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439874621016090223:2502] client test-consumer disconnected session test-consumer_10_1_8846558423510871749_v1 2024-11-21T23:13:20.553773Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic_B] pipe [10:7439874629606025031:2559] disconnected; active server actors: 1 2024-11-21T23:13:20.553790Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic_B] pipe [10:7439874629606025031:2559] client test-consumer disconnected session test-consumer_10_2_5328613906770633023_v1 2024-11-21T23:13:20.553845Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:20.553876Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439874629606024934:2537] destroyed 2024-11-21T23:13:20.553897Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:20.553922Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439874629606024937:2537] destroyed 2024-11-21T23:13:20.553937Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:20.553955Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_10_2_5328613906770633023_v1 2024-11-21T23:13:20.553977Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [10:7439874629606025034:2562] destroyed 2024-11-21T23:13:20.553996Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:20.554022Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439874621016090206:2496] destroyed 2024-11-21T23:13:20.554039Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:20.554053Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_8846558423510871749_v1 2024-11-21T23:13:20.554073Z node 10 :PERSQUEUE DEBUG: [PQ: 
72075186224037894] server disconnected, pipe [10:7439874621016090226:2505] destroyed 2024-11-21T23:13:20.554568Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:13:20.554644Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_2_5328613906770633023_v1 2024-11-21T23:13:20.554669Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_8846558423510871749_v1 2024-11-21T23:13:20.554696Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:13:20.797288Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 >> TS3WrapperTests::GetUnknownObject |84.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> TS3WrapperTests::AbortMultipartUpload [GOOD] >> TS3WrapperTests::MultipartUpload [GOOD] |84.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |84.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> TS3WrapperTests::GetUnknownObject [GOOD] >> BsControllerTest::TestLocalSelfHeal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2024-11-21T23:13:21.607246Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874641119978146:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:21.607563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001232/r3tmp/tmph00Id1/pdisk_1.dat 2024-11-21T23:13:21.997545Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:22.002160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:22.002262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:22.016365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:22.069071Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream accepted Name# Session ok# true peer# ipv6:[::1]:56824 2024-11-21T23:13:22.069330Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade attach Name# Session actor# [1:7439874645414945794:2250] peer# ipv6:[::1]:56824 2024-11-21T23:13:22.069352Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade read Name# Session peer# ipv6:[::1]:56824 2024-11-21T23:13:22.069416Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade write Name# Session data# peer# ipv6:[::1]:56824 2024-11-21T23:13:22.069680Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] facade finish Name# Session peer# ipv6:[::1]:56824 grpc status# (0) message# 2024-11-21T23:13:22.069787Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] write finished Name# Session ok# true peer# ipv6:[::1]:56824 2024-11-21T23:13:22.070037Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream done notification Name# Session ok# true peer# ipv6:[::1]:56824 2024-11-21T23:13:22.070079Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] stream finished Name# Session ok# true peer# 
ipv6:[::1]:56824 grpc status# (0) message# 2024-11-21T23:13:22.070079Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] read finished Name# Session ok# false data# peer# ipv6:[::1]:56824 2024-11-21T23:13:22.070110Z node 1 :GRPC_SERVER DEBUG: [0x51f00002ae80] deregistering request Name# Session peer# ipv6:[::1]:56824 (read done) 2024-11-21T23:13:22.070142Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2024-11-21T23:13:22.070157Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2024-11-21T23:13:22.070172Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] Test command err: Trying to start YDB, gRPC: 7402, MsgBus: 11941 2024-11-21T23:12:29.712068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874420212012958:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:29.712202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00118f/r3tmp/tmpdTM1GL/pdisk_1.dat 2024-11-21T23:12:30.432952Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:30.442325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:30.450541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:30.459888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7402, node 1 2024-11-21T23:12:30.674806Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:30.674826Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:30.674832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:30.674915Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11941 TClient is connected to server localhost:11941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:31.549908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:33.960688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874437391882629:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.960805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.966520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874437391882650:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:33.970730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:12:33.990074Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:12:33.990327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874437391882652:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:12:34.515918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:34.752185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.752432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:34.753061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:34.753209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:34.753325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:34.753433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:34.753530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:34.753624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:34.753725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:34.753822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:34.753918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:34.754014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7439874441686850156:2317];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:34.756783Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.756850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:34.757069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:34.757154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:34.757251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:34.757330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:34.757393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:34.757466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:34.757681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:34.757768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:34.757896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:34.758012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874441686850159:2318];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:34.794661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874441686850160:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:34.794737Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7439874441686850160:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:34.794980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874441686850160:2319];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:34.795113Z node 1 ... kip_indexation;reason=disabled; 2024-11-21T23:13:16.939815Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7439874570931273936:2988];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.940077Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7439874570931273908:2964];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.941311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7439874575226241700:3110];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.941499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7439874570931274129:3048];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.941640Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[2:7439874570931274127:3046];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.941772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7439874575226241791:3117];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.941925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7439874575226241627:3101];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.943990Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7439874570931273962:3010];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.944779Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7439874575226241607:3091];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.947103Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7439874575226241698:3109];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.947422Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7439874570931274119:3041];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.948481Z node 
2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[2:7439874575226241603:3089];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038030;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.948636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[2:7439874575226241695:3108];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038034;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.948770Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7439874575226241714:3111];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.948891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7439874575226241626:3100];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.952470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7439874575226241750:3115];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.952708Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7439874570931273889:2951];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.953921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7439874575226241628:3102];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.954064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7439874575226241609:3092];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.954175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7439874570931274117:3039];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.954284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7439874575226241589:3087];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.959281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7439874570931273957:3005];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:16.961828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7439874570931273908:2964];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.271081Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7439874570931273087:2901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.271443Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038004;self_id=[2:7439874570931273092:2906];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.275916Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[2:7439874570931273008:2878];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037998;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.277258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7439874570931273087:2901];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.277497Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7439874570931273092:2906];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.281304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037998;self_id=[2:7439874570931273008:2878];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037998;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.286561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;local_tx_no=11;tx_info=TTxBlobsWritingFinished;tablet_id=72075186224037993;tx_state=complete;fline=interaction.h:353;batch=Key: [ 1 ] ;info={"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":2},"id":281474976710665}],"finishes":[{"inc":{"count_include":2},"id":281474976710665}]},"p":{"include":0,"pk":"1;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710665}],"finishes":[{"inc":{"count_include":1},"id":281474976710665}]},"p":{"include":0,"pk":"4000000001;"}}]}; 2024-11-21T23:13:17.295616Z node 2 :KQP_EXECUTER WARN: ActorId: [2:7439874618175918826:3636] TxId: 281474976710668. Ctx: { TraceId: 01jd8g0krh97xwxgepsee6ry21, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTY1ZmFlNDYtYmVkN2QxZjctZjBlMjVlNzItY2MzYTQyNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event: NYql::NDq::TEvDqCompute::TEvState, at state: FinalizeState 2024-11-21T23:13:17.302166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7439874570931273102:2910];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038002;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.302786Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7439874570931273102:2910];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038002;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:17.441398Z node 2 :TX_COLUMNSHARD ERROR: tablet_id=72075186224038022;self_id=[2:7439874575226241619:3096];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224038022;event=TEvWrite;fline=events.h:97;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=0; 2024-11-21T23:13:17.458673Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439874618175918825:3636], SessionActorId: [2:7439874596701081506:3636], Got LOCKS BROKEN for table. ShardID=72075186224038022, Sink=[2:7439874618175918825:3636].{
<main>: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T23:13:17.458820Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439874618175918825:3636], SessionActorId: [2:7439874596701081506:3636], Transaction locks invalidated.{
<main>: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 }. statusCode=ABORTED. subIssues=
<main>: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 . sessionActorId=[2:7439874596701081506:3636]. isRollback=0 2024-11-21T23:13:17.459076Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTY1ZmFlNDYtYmVkN2QxZjctZjBlMjVlNzItY2MzYTQyNGQ=, ActorId: [2:7439874596701081506:3636], ActorState: ExecuteState, TraceId: 01jd8g0krh97xwxgepsee6ry21, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7439874622470887196:3636] from: [2:7439874618175918825:3636] 2024-11-21T23:13:17.459319Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439874622470887196:3636] TxId: 281474976710669. Ctx: { TraceId: 01jd8g0krh97xwxgepsee6ry21, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTY1ZmFlNDYtYmVkN2QxZjctZjBlMjVlNzItY2MzYTQyNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Transaction locks invalidated.{
<main>: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 };
: Fatal: tablet lock have another internal generation counter: 18446744073709551615 != 0 } 2024-11-21T23:13:17.459761Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTY1ZmFlNDYtYmVkN2QxZjctZjBlMjVlNzItY2MzYTQyNGQ=, ActorId: [2:7439874596701081506:3636], ActorState: ExecuteState, TraceId: 01jd8g0krh97xwxgepsee6ry21, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> BsControllerTest::SelfHealMirror3dc >> BsControllerTest::DecommitRejected ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2024-11-21T23:13:25.119169Z node 1 :S3_WRAPPER NOTICE: Request: uuid# DED2DBAD-BA49-4F6B-9AE5-9041E945CD34, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:9668 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5FEFBC48-C117-4937-BBBC-4CDDF5848829 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T23:13:25.140756Z node 1 :S3_WRAPPER NOTICE: Response: uuid# DED2DBAD-BA49-4F6B-9AE5-9041E945CD34, response# No response body. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2024-11-21T23:13:21.782986Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874640570378399:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:21.783052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001269/r3tmp/tmpYqvZi5/pdisk_1.dat 2024-11-21T23:13:22.178202Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:22.199117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:22.199401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:22.205236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:22.222747Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream accepted Name# Session ok# true peer# ipv6:[::1]:42100 2024-11-21T23:13:22.223075Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade attach Name# Session actor# [1:7439874644865345968:2251] peer# ipv6:[::1]:42100 2024-11-21T23:13:22.223120Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade read Name# Session peer# ipv6:[::1]:42100 2024-11-21T23:13:22.223213Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] facade finish Name# Session peer# ipv6:[::1]:42100 grpc status# (0) message# 2024-11-21T23:13:22.223725Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream done notification Name# Session ok# true peer# ipv6:[::1]:42100 2024-11-21T23:13:22.223770Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] read finished Name# Session ok# false data# peer# ipv6:[::1]:42100 2024-11-21T23:13:22.223804Z node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] stream finished Name# Session ok# true peer# ipv6:[::1]:42100 grpc status# (0) message# 2024-11-21T23:13:22.223862Z 
node 1 :GRPC_SERVER DEBUG: [0x51f00002bc80] deregistering request Name# Session peer# ipv6:[::1]:42100 (finish done) 2024-11-21T23:13:22.223871Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 >> TS3WrapperTests::CompleteUnknownUpload |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2024-11-21T23:13:25.781250Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 958772F9-958A-4791-9D1E-126E819A1746, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:12428 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9859860C-DF91-484E-B1A9-CAA40E52A90F amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2024-11-21T23:13:25.786529Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 958772F9-958A-4791-9D1E-126E819A1746, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2024-11-21T23:13:25.786965Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4415A39F-255D-4577-A7FB-25BB2C043DBF, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:12428 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 475D0D0B-036B-4631-A8C1-3B41AD8E4D66 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2024-11-21T23:13:25.790792Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4415A39F-255D-4577-A7FB-25BB2C043DBF, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T23:13:25.791347Z node 1 :S3_WRAPPER NOTICE: Request: uuid# EB864BAC-2AC9-43A4-91ED-96107B901503, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:12428 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 06C523DA-0011-46B6-8251-00CFB21913CA amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2024-11-21T23:13:25.795509Z node 1 :S3_WRAPPER NOTICE: Response: uuid# EB864BAC-2AC9-43A4-91ED-96107B901503, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2024-11-21T23:13:25.796056Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4648B46D-C340-4BF7-8776-428B5CB5A6FA, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:12428 Accept: */* Connection: Upgrade, 
HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F1013F7E-31E2-435F-98CB-9EFECABA6E93 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2024-11-21T23:13:25.799187Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4648B46D-C340-4BF7-8776-428B5CB5A6FA, response# GetObjectResult { } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2024-11-21T23:13:26.086129Z node 1 :S3_WRAPPER NOTICE: Request: uuid# EE5769A2-176E-45C4-8B99-A409E9EDC45C, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:22994 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7B7C655B-CE9C-431E-9CDC-346964D68418 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T23:13:26.092188Z node 1 :S3_WRAPPER NOTICE: Response: uuid# EE5769A2-176E-45C4-8B99-A409E9EDC45C, response# No response body. |84.1%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2024-11-21T23:13:25.789315Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D2518EA8-2FB7-44FA-A6AD-00FA26CD24E0, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:3596 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1BA4EB37-B227-4E6C-AE0B-6D0D27740800 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2024-11-21T23:13:25.804478Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D2518EA8-2FB7-44FA-A6AD-00FA26CD24E0, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2024-11-21T23:13:25.805265Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 8C842441-DDEC-4E9E-816A-C7CA38936AC5, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:3596 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F0C47C39-92DF-46DD-A3CE-F2F4BD1309FB amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2024-11-21T23:13:25.811288Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 8C842441-DDEC-4E9E-816A-C7CA38936AC5, response# AbortMultipartUploadResult { } 2024-11-21T23:13:25.814680Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 65C1F87B-BDC3-45E7-ABAD-0AE71EA0A7DC, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:3596 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA 
amz-sdk-invocation-id: 6FEDEAF3-95F3-45C9-BCE9-CCE7C5D1BFD4 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2024-11-21T23:13:25.824417Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 65C1F87B-BDC3-45E7-ABAD-0AE71EA0A7DC, response# No response body. >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2024-11-21T23:13:23.480233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:13:23.483862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:13:23.484053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002046/r3tmp/tmpQwFeOk/pdisk_1.dat 2024-11-21T23:13:23.995755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:13:24.046474Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:24.107397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:24.107628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:24.119764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:24.262229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:13:24.852887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:24.852998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:832:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:24.853079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:24.858018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:13:25.075229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:836:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:13:25.609284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8g0xbjf95t38nxdrrbpz9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA3OWI4ZWYtOGY3MWE3YmItMjUzYzNmYjYtZWIwODU4MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2024-11-21T23:13:26.311718Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T23:13:26.311787Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T23:13:26.311859Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T23:13:26.311881Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T23:13:26.311915Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T23:13:26.311937Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T23:13:26.311970Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T23:13:26.311990Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T23:13:26.312038Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T23:13:26.312068Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T23:13:26.312118Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T23:13:26.312139Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T23:13:26.312199Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T23:13:26.312225Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T23:13:26.312259Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T23:13:26.312277Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T23:13:26.312321Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T23:13:26.312340Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T23:13:26.312370Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T23:13:26.312389Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T23:13:26.312425Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T23:13:26.312445Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T23:13:26.312479Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T23:13:26.312497Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T23:13:26.312533Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T23:13:26.312556Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T23:13:26.312607Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T23:13:26.312628Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T23:13:26.312684Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T23:13:26.312706Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T23:13:26.336944Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:508:32] Status# ERROR ClientId# [1:508:32] ServerId# [0:0:0] PipeClient# [1:508:32] 2024-11-21T23:13:26.337590Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:509:20] Status# ERROR ClientId# [2:509:20] ServerId# [0:0:0] PipeClient# [2:509:20] 2024-11-21T23:13:26.337639Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:510:20] Status# ERROR ClientId# [3:510:20] ServerId# [0:0:0] PipeClient# [3:510:20] 2024-11-21T23:13:26.337675Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:511:20] Status# ERROR ClientId# [4:511:20] ServerId# [0:0:0] 
PipeClient# [4:511:20] 2024-11-21T23:13:26.337724Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:512:20] Status# ERROR ClientId# [5:512:20] ServerId# [0:0:0] PipeClient# [5:512:20] 2024-11-21T23:13:26.337779Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:513:20] Status# ERROR ClientId# [6:513:20] ServerId# [0:0:0] PipeClient# [6:513:20] 2024-11-21T23:13:26.337816Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:514:20] Status# ERROR ClientId# [7:514:20] ServerId# [0:0:0] PipeClient# [7:514:20] 2024-11-21T23:13:26.337874Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:515:20] Status# ERROR ClientId# [8:515:20] ServerId# [0:0:0] PipeClient# [8:515:20] 2024-11-21T23:13:26.337918Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:516:20] Status# ERROR ClientId# [9:516:20] ServerId# [0:0:0] PipeClient# [9:516:20] 2024-11-21T23:13:26.337966Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:517:20] Status# ERROR ClientId# [10:517:20] ServerId# [0:0:0] PipeClient# [10:517:20] 2024-11-21T23:13:26.338016Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:518:20] Status# ERROR ClientId# [11:518:20] ServerId# [0:0:0] PipeClient# [11:518:20] 2024-11-21T23:13:26.338053Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:519:20] Status# ERROR ClientId# [12:519:20] ServerId# [0:0:0] PipeClient# [12:519:20] 2024-11-21T23:13:26.338094Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:520:20] Status# ERROR ClientId# [13:520:20] ServerId# [0:0:0] PipeClient# [13:520:20] 2024-11-21T23:13:26.338145Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:521:20] Status# ERROR ClientId# [14:521:20] ServerId# [0:0:0] PipeClient# [14:521:20] 2024-11-21T23:13:26.338180Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:522:20] Status# ERROR ClientId# [15:522:20] ServerId# [0:0:0] PipeClient# [15:522:20] 2024-11-21T23:13:26.382205Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2024-11-21T23:13:26.382310Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2024-11-21T23:13:26.382357Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2024-11-21T23:13:26.382414Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2024-11-21T23:13:26.382454Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2024-11-21T23:13:26.382503Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2024-11-21T23:13:26.382570Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2024-11-21T23:13:26.382614Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2024-11-21T23:13:26.382660Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2024-11-21T23:13:26.382696Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2024-11-21T23:13:26.382730Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2024-11-21T23:13:26.382768Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2024-11-21T23:13:26.382801Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2024-11-21T23:13:26.382841Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2024-11-21T23:13:26.382873Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2024-11-21T23:13:26.385168Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:574:58] Status# OK ClientId# [1:574:58] ServerId# [1:603:59] PipeClient# [1:574:58] 2024-11-21T23:13:26.385222Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2024-11-21T23:13:26.389316Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:575:21] Status# OK ClientId# [2:575:21] 
ServerId# [1:604:60] PipeClient# [2:575:21] 2024-11-21T23:13:26.389368Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2024-11-21T23:13:26.389418Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:576:21] Status# OK ClientId# [3:576:21] ServerId# [1:605:61] PipeClient# [3:576:21] 2024-11-21T23:13:26.389442Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2024-11-21T23:13:26.389475Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:577:21] Status# OK ClientId# [4:577:21] ServerId# [1:606:62] PipeClient# [4:577:21] 2024-11-21T23:13:26.389497Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2024-11-21T23:13:26.389527Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:578:21] Status# OK ClientId# [5:578:21] ServerId# [1:607:63] PipeClient# [5:578:21] 2024-11-21T23:13:26.389547Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2024-11-21T23:13:26.389614Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:579:21] Status# OK ClientId# [6:579:21] ServerId# [1:608:64] PipeClient# [6:579:21] 2024-11-21T23:13:26.389638Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2024-11-21T23:13:26.389670Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:580:21] Status# OK ClientId# [7:580:21] ServerId# [1:609:65] PipeClient# [7:580:21] 2024-11-21T23:13:26.389735Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2024-11-21T23:13:26.389803Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:581:21] Status# OK ClientId# [8:581:21] ServerId# [1:610:66] PipeClient# [8:581:21] 2024-11-21T23:13:26.389834Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2024-11-21T23:13:26.389872Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:582:21] Status# OK ClientId# [9:582:21] ServerId# [1:611:67] PipeClient# [9:582:21] 2024-11-21T23:13:26.389895Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2024-11-21T23:13:26.389932Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:583:21] Status# OK ClientId# [10:583:21] ServerId# [1:612:68] PipeClient# [10:583:21] 2024-11-21T23:13:26.389957Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2024-11-21T23:13:26.389994Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:584:21] Status# OK ClientId# [11:584:21] ServerId# [1:613:69] PipeClient# [11:584:21] 2024-11-21T23:13:26.390016Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2024-11-21T23:13:26.390055Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:585:21] Status# OK ClientId# [12:585:21] ServerId# [1:614:70] PipeClient# [12:585:21] 2024-11-21T23:13:26.390078Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2024-11-21T23:13:26.390113Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:586:21] Status# OK ClientId# [13:586:21] ServerId# [1:615:71] PipeClient# [13:586:21] 2024-11-21T23:13:26.390151Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2024-11-21T23:13:26.390190Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:587:21] Status# OK ClientId# [14:587:21] ServerId# [1:616:72] PipeClient# [14:587:21] 2024-11-21T23:13:26.390214Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2024-11-21T23:13:26.390249Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:588:21] Status# OK 
ClientId# [15:588:21] ServerId# [1:617:73] PipeClient# [15:588:21] 2024-11-21T23:13:26.390276Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2024-11-21T23:13:26.393213Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:26.393308Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T23:13:26.414751Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2024-11-21T23:13:26.415890Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T23:13:26.415962Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T23:13:26.416052Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2024-11-21T23:13:26.416234Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T23:13:26.416283Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T23:13:26.416336Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2024-11-21T23:13:26.416439Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T23:13:26.416472Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T23:13:26.416518Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2024-11-21T23:13:26.416609Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T23:13:26.416650Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2024-11-21T23:13:26.416689Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2024-11-21T2 ... 
2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.681568Z 2 00h01m23.916512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2024-11-21T23:13:26.681801Z 1 00h01m23.916512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.682080Z 12 00h01m26.884512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2024-11-21T23:13:26.682381Z 1 00h01m26.884512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.682608Z 1 00h01m27.888512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2024-11-21T23:13:26.682756Z 1 00h01m27.888512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.682998Z 1 00h01m29.648024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.683257Z 1 00h01m30.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.683424Z 1 00h01m31.510512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.683624Z 1 00h01m34.261512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.683742Z 10 00h01m34.885512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2024-11-21T23:13:26.683950Z 1 00h01m34.885512s :BS_SELFHEAL INFO: 
{BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.684343Z 1 00h01m37.984512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.684478Z 11 00h01m38.443512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2024-11-21T23:13:26.684668Z 1 00h01m38.443512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.684865Z 1 00h01m38.676512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.685053Z 1 00h01m38.916512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:466} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2024-11-21T23:13:26.685203Z 14 00h01m39.023536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2024-11-21T23:13:26.685420Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483648 2024-11-21T23:13:26.685874Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T23:13:26.685907Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2024-11-21T23:13:26.686177Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T23:13:26.686202Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2024-11-21T23:13:26.686222Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T23:13:26.686238Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2024-11-21T23:13:26.686267Z 1 00h01m39.023536s 
:BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T23:13:26.686296Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2024-11-21T23:13:26.686340Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T23:13:26.686376Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2024-11-21T23:13:26.686428Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T23:13:26.686452Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2024-11-21T23:13:26.686478Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T23:13:26.686504Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2024-11-21T23:13:26.686529Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2024-11-21T23:13:26.686553Z 1 00h01m39.023536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2024-11-21T23:13:26.688737Z 1 00h01m39.024048s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2024-11-21T23:13:26.688796Z 1 00h01m39.024048s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2024-11-21T23:13:26.689290Z 1 00h01m39.024048s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2024-11-21T23:13:26.689349Z 1 00h01m39.024048s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483648 Success# true 2024-11-21T23:13:26.689476Z 8 00h01m39.024048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T23:13:26.689515Z 8 00h01m39.024048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2024-11-21T23:13:26.689609Z 2 00h01m39.024048s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2024-11-21T23:13:26.689653Z 2 00h01m39.024048s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2024-11-21T23:13:26.689744Z 3 00h01m39.024048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2024-11-21T23:13:26.689787Z 3 00h01m39.024048s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2024-11-21T23:13:26.689874Z 4 00h01m39.024048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2024-11-21T23:13:26.689911Z 4 00h01m39.024048s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2024-11-21T23:13:26.690025Z 5 00h01m39.024048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T23:13:26.690078Z 5 00h01m39.024048s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2024-11-21T23:13:26.690144Z 6 00h01m39.024048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T23:13:26.690178Z 6 00h01m39.024048s :BS_NODE DEBUG: [6] VDiskId# 
[80000000:3:1:2:0] -> [80000000:4:1:2:0] 2024-11-21T23:13:26.690224Z 9 00h01m39.024048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T23:13:26.690298Z 13 00h01m39.024048s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2024-11-21T23:13:26.690350Z 13 00h01m39.024048s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2024-11-21T23:13:26.690494Z 14 00h01m39.024048s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T23:13:26.690578Z 14 00h01m39.024048s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2024-11-21T23:13:26.690672Z 15 00h01m39.024048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2024-11-21T23:13:26.690715Z 15 00h01m39.024048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2024-11-21T23:13:26.690789Z 15 00h01m39.024048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2024-11-21T23:13:26.692558Z 15 00h01m43.777048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2024-11-21T23:13:26.696037Z 15 00h02m14.969048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2024-11-21T23:13:26.696982Z 9 00h02m14.969560s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T23:13:26.697056Z 9 00h02m14.969560s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] |84.1%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2024-11-21T23:13:26.863615Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 88BCEBE9-E2B8-42EA-A6EB-ECAC0D00B696, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:31598 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 44C034E3-3D50-4832-A8F7-D88955C6BE47 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2024-11-21T23:13:26.871796Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 88BCEBE9-E2B8-42EA-A6EB-ECAC0D00B696, response# >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] |84.1%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSubscriberTest::NotifyUpdate >> TSubscriberTest::InvalidNotification >> TSubscriberTest::Sync >> BasicUsage::ReadSessionCorrectClose [GOOD] >> BasicUsage::ConflictingWrites >> TSubscriberCombinationsTest::CombinationsRootDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 17531, MsgBus: 31511 2024-11-21T23:12:39.817546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874461960806746:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:39.817815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001133/r3tmp/tmpKod2Nb/pdisk_1.dat 2024-11-21T23:12:40.613477Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:40.643803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:40.643874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:40.646326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17531, node 1 2024-11-21T23:12:40.831488Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:40.831507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:40.831513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:40.831583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31511 TClient is connected to server localhost:31511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:42.227757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:44.785797Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874461960806746:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:44.785898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:45.460490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874487730611031:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.460651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.463023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874487730611043:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:45.467950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:12:45.502646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874487730611045:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:12:46.087719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:46.312048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:12:48.596782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:51.337920Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDE3MzU4OTMtZDExM2NiM2QtNDdmYjUxNzctNjAzZWFmMDI=, ActorId: [1:7439874509205455624:2943], ActorState: ExecuteState, TraceId: 01jd8fzw4m0fnjhkwpae1dcq12, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Tables: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2024-11-21T23:12:55.578634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:12:55.578658Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 18869, MsgBus: 12853 2024-11-21T23:12:58.110264Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874542015881778:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:58.110303Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001133/r3tmp/tmpIc9RHc/pdisk_1.dat 2024-11-21T23:12:58.469346Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:58.486126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:58.486195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:58.492097Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18869, node 2 2024-11-21T23:12:58.690973Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:58.690993Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:58.691004Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:58.691096Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12853 TClient is connected to server localhost:12853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:59.712684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:59.733930Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:13:03.118502Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874542015881778:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:03.118574Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:13:04.461010Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874567785686167:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:04.461073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874567785686197:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:04.461128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:04.465357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:13:04.498802Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:13:04.499231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874567785686204:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:13:04.793259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:13:05.030215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439874567785686455:2328];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:13:05.031295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7439874567785686455:2328];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granul ... 075186224038015;self_id=[2:7439874602145430510:3123];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.611605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7439874597850462707:3077];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.611698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7439874602145430340:3097];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.611736Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7439874602145430432:3108];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.611819Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7439874602145430534:3140];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.611861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7439874597850462656:3061];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.611932Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7439874602145430349:3101];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.611980Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7439874602145430709:3175];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7439874602145430554:3153];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7439874602145430167:3086];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612200Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038073;self_id=[2:7439874597850462653:3058];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612312Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7439874602145430542:3146];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7439874602145430439:3110];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612539Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7439874602145430385:3106];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7439874602145430602:3158];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7439874602145430563:3154];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.612965Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7439874602145430347:3100];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038036;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.613132Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7439874602145430546:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.613276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7439874602145430465:3117];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.613418Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[2:7439874602145430484:3120];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.613554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7439874602145430508:3122];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.613690Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7439874602145430545:3147];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.613828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7439874602145430632:3169];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.613969Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038005;self_id=[2:7439874597850462676:3074];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.614094Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7439874602145430515:3127];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.616557Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7439874602145430548:3149];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.617591Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7439874602145430526:3137];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.617800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7439874602145430360:3105];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.617971Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7439874602145430618:3162];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.618128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7439874602145430539:3144];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.618281Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7439874602145430523:3134];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.628956Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7439874602145430546:3148];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.630179Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7439874602145430611:3160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.630366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7439874602145430333:3092];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.630641Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7439874602145430333:3092];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.630775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7439874602145430526:3137];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.630912Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038052;self_id=[2:7439874602145430360:3105];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.631049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7439874602145430618:3162];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.631164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7439874602145430539:3144];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.631295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7439874602145430523:3134];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.632554Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7439874602145430525:3136];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.632680Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7439874602145430563:3154];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.632787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7439874602145430347:3100];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:20.633382Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7439874602145430611:3160];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 1422794459429874270 Reassign# 0 -- VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green } Status: "READY" Ready: true Put# [1:1:1:0:0:60:0] Put# [1:1:2:0:0:100:0] Put# [1:1:3:0:0:58:0] Put# [1:1:4:0:0:24:0] Put# [1:1:5:0:0:86:0] 2024-11-21T23:10:25.727966Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2024-11-21T23:10:25.730365Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7639613759602763740] 2024-11-21T23:10:25.741160Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:1:0:0:60:4] 2024-11-21T23:10:25.741248Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:2:0:0:100:5] 2024-11-21T23:10:25.741283Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: 
RESURRECT: id# [1:1:3:0:0:58:6] 2024-11-21T23:10:25.741336Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: RESURRECT: id# [1:1:4:0:0:24:6] 2024-11-21T23:10:25.741667Z 9 00h00m20.011024s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 4 PartsResurrected# 4 Put# [1:1:6:0:0:87:0] Put# [1:1:7:0:0:35:0] Put# [1:1:8:0:0:43:0] Put# [1:1:9:0:0:73:0] Put# [1:1:10:0:0:30:0] Put# [1:1:11:0:0:57:0] Put# [1:1:12:0:0:37:0] Put# [1:1:13:0:0:49:0] Put# [1:1:14:0:0:61:0] Put# [1:1:15:0:0:79:0] Put# [1:1:16:0:0:84:0] Put# [1:1:17:0:0:26:0] Put# [1:1:18:0:0:53:0] Put# [1:1:19:0:0:45:0] Put# [1:1:20:0:0:91:0] Put# [1:1:21:0:0:22:0] Put# [1:1:22:0:0:83:0] Put# [1:1:23:0:0:36:0] Put# [1:1:24:0:0:46:0] Put# [1:1:25:0:0:47:0] Put# [1:1:26:0:0:66:0] Put# [1:1:27:0:0:25:0] Put# [1:1:28:0:0:43:0] Put# [1:1:29:0:0:19:0] Put# [1:1:30:0:0:72:0] Put# [1:1:31:0:0:98:0] Put# [1:1:32:0:0:6:0] Put# [1:1:33:0:0:22:0] Put# [1:1:34:0:0:21:0] Put# [1:1:35:0:0:77:0] Put# [1:1:36:0:0:66:0] Put# [1:1:37:0:0:63:0] Put# [1:1:38:0:0:38:0] Put# [1:1:39:0:0:29:0] Put# [1:1:40:0:0:57:0] Put# [1:1:41:0:0:66:0] Put# [1:1:42:0:0:35:0] Put# [1:1:43:0:0:23:0] Put# [1:1:44:0:0:58:0] Put# [1:1:45:0:0:23:0] Put# [1:1:46:0:0:31:0] Put# [1:1:47:0:0:42:0] Put# [1:1:48:0:0:33:0] Put# [1:1:49:0:0:95:0] Put# [1:1:50:0:0:89:0] Put# [1:1:51:0:0:58:0] Put# [1:1:52:0:0:73:0] Put# [1:1:53:0:0:18:0] Put# [1:1:54:0:0:28:0] Put# [1:1:55:0:0:72:0] Put# [1:1:56:0:0:84:0] Put# [1:1:57:0:0:51:0] Put# [1:1:58:0:0:22:0] Put# [1:1:59:0:0:68:0] Put# [1:1:60:0:0:62:0] Put# [1:1:61:0:0:27:0] Put# [1:1:62:0:0:76:0] Put# [1:1:63:0:0:59:0] Put# [1:1:64:0:0:17:0] Put# [1:1:65:0:0:70:0] Put# [1:1:66:0:0:30:0] Put# [1:1:67:0:0:54:0] Put# [1:1:68:0:0:20:0] Put# [1:1:69:0:0:40:0] Put# [1:1:70:0:0:4:0] Put# [1:1:71:0:0:33:0] Put# [1:1:72:0:0:18:0] Put# [1:1:73:0:0:83:0] Put# [1:1:74:0:0:94:0] Put# [1:1:75:0:0:40:0] Put# [1:1:76:0:0:16:0] Put# [1:1:77:0:0:56:0] Put# [1:1:78:0:0:88:0] Put# [1:1:79:0:0:11:0] Put# [1:1:80:0:0:99:0] Put# [1:1:81:0:0:22:0] Put# [1:1:82:0:0:31:0] Put# [1:1:83:0:0:71:0] Put# [1:1:84:0:0:30:0] Put# [1:1:85:0:0:81:0] Put# [1:1:86:0:0:11:0] Put# [1:1:87:0:0:35:0] Put# [1:1:88:0:0:82:0] Put# [1:1:89:0:0:40:0] Put# [1:1:90:0:0:35:0] Put# [1:1:91:0:0:20:0] Put# [1:1:92:0:0:57:0] Put# [1:1:93:0:0:36:0] Put# [1:1:94:0:0:34:0] Put# [1:1:95:0:0:85:0] Put# [1:1:96:0:0:45:0] Put# [1:1:97:0:0:6:0] Put# [1:1:98:0:0:19:0] Put# [1:1:99:0:0:89:0] Put# [1:1:100:0:0:64:0] Put# [1:1:101:0:0:51:0] Put# [1:1:102:0:0:100:0] Put# [1:1:103:0:0:73:0] Put# [1:1:104:0:0:39:0] Put# [1:1:105:0:0:45:0] Put# [1:1:106:0:0:64:0] Put# [1:1:107:0:0:67:0] Put# [1:1:108:0:0:49:0] Put# [1:1:109:0:0:96:0] Put# [1:1:110:0:0:31:0] Put# [1:1:111:0:0:23:0] Put# [1:1:112:0:0:59:0] Put# [1:1:113:0:0:85:0] Put# [1:1:114:0:0:3:0] Put# [1:1:115:0:0:21:0] Put# [1:1:116:0:0:40:0] Put# [1:1:117:0:0:30:0] Put# [1:1:118:0:0:26:0] Put# [1:1:119:0:0:26:0] Put# [1:1:120:0:0:88:0] Put# [1:1:121:0:0:18:0] Put# [1:1:122:0:0:47:0] Put# [1:1:123:0:0:11:0] Put# [1:1:124:0:0:86:0] Put# [1:1:125:0:0:72:0] Put# [1:1:126:0:0:70:0] Put# [1:1:127:0:0:50:0] Put# [1:1:128:0:0:9:0] Put# [1:1:129:0:0:94:0] Put# [1:1:130:0:0:25:0] Put# [1:1:131:0:0:31:0] Put# [1:1:132:0:0:43:0] Put# [1:1:133:0:0:87:0] Put# [1:1:134:0:0:94:0] Put# [1:1:135:0:0:27:0] Put# [1:1:136:0:0:81:0] Put# [1:1:137:0:0:41:0] Put# [1:1:138:0:0:89:0] Put# [1:1:139:0:0:60:0] Put# [1:1:140:0:0:52:0] Put# [1:1:141:0:0:97:0] Put# [1:1:142:0:0:22:0] Put# [1:1:143:0:0:97:0] Put# 
[1:1:144:0:0:93:0] Put# [1:1:145:0:0:87:0] Put# [1:1:146:0:0:43:0] Put# [1:1:147:0:0:54:0] Put# [1:1:148:0:0:58:0] Put# [1:1:149:0:0:12:0] Put# [1:1:150:0:0:41:0] Put# [1:1:151:0:0:8:0] Put# [1:1:152:0:0:68:0] Put# [1:1:153:0:0:20:0] Put# [1:1:154:0:0:96:0] Put# [1:1:155:0:0:68:0] Put# [1:1:156:0:0:2:0] Put# [1:1:157:0:0:78:0] Put# [1:1:158:0:0:82:0] Put# [1:1:159:0:0:84:0] Put# [1:1:160:0:0:10:0] Put# [1:1:161:0:0:22:0] Put# [1:1:162:0:0:66:0] Put# [1:1:163:0:0:9:0] Put# [1:1:164:0:0:29:0] Put# [1:1:165:0:0:72:0] Put# [1:1:166:0:0:59:0] Put# [1:1:167:0:0:25:0] Put# [1:1:168:0:0:37:0] Put# [1:1:169:0:0:61:0] Put# [1:1:170:0:0:53:0] Put# [1:1:171:0:0:28:0] Put# [1:1:172:0:0:71:0] Put# [1:1:173:0:0:94:0] Put# [1:1:174:0:0:77:0] Put# [1:1:175:0:0:38:0] Put# [1:1:176:0:0:30:0] Put# [1:1:177:0:0:35:0] Put# [1:1:178:0:0:45:0] Put# [1:1:179:0:0:95:0] Put# [1:1:180:0:0:18:0] Put# [1:1:181:0:0:51:0] Put# [1:1:182:0:0:58:0] Put# [1:1:183:0:0:39:0] Put# [1:1:184:0:0:98:0] Put# [1:1:185:0:0:79:0] Put# [1:1:186:0:0:27:0] Put# [1:1:187:0:0:11:0] Put# [1:1:188:0:0:58:0] Put# [1:1:189:0:0:95:0] Put# [1:1:190:0:0:59:0] Put# [1:1:191:0:0:8:0] Put# [1:1:192:0:0:65:0] Put# [1:1:193:0:0:12:0] Put# [1:1:194:0:0:54:0] Put# [1:1:195:0:0:67:0] Put# [1:1:196:0:0:93:0] Put# [1:1:197:0:0:25:0] Put# [1:1:198:0:0:8:0] Put# [1:1:199:0:0:88:0] Put# [1:1:200:0:0:67:0] Put# [1:1:201:0:0:45:0] Put# [1:1:202:0:0:64:0] Put# [1:1:203:0:0:72:0] Put# [1:1:204:0:0:62:0] Put# [1:1:205:0:0:45:0] Put# [1:1:206:0:0:96:0] Put# [1:1:207:0:0:94:0] Put# [1:1:208:0:0:8:0] Put# [1:1:209:0:0:100:0] Put# [1:1:210:0:0:79:0] Put# [1:1:211:0:0:45:0] Put# [1:1:212:0:0:51:0] Put# [1:1:213:0:0:35:0] Put# [1:1:214:0:0:19:0] Put# [1:1:215:0:0:37:0] Put# [1:1:216:0:0:98:0] Put# [1:1:217:0:0:49:0] Put# [1:1:218:0:0:15:0] Put# [1:1:219:0:0:92:0] Put# [1:1:220:0:0:61:0] Put# [1:1:221:0:0:16:0] Put# [1:1:222:0:0:70:0] Put# [1:1:223:0:0:81:0] Put# [1:1:224:0:0:55:0] Put# [1:1:225:0:0:6:0] Put# [1:1:226:0:0:47:0] Put# [1:1:227:0:0:32:0] Put# [1:1:228:0:0:78:0] Put# [1:1:229:0:0:97:0] Put# [1:1:230:0:0:62:0] Put# [1:1:231:0:0:19:0] Put# [1:1:232:0:0:53:0] Put# [1:1:233:0:0:41:0] Put# [1:1:234:0:0:23:0] Put# [1:1:235:0:0:46:0] Put# [1:1:236:0:0:32:0] Put# [1:1:237:0:0:36:0] Put# [1:1:238:0:0:12:0] Put# [1:1:239:0:0:56:0] Put# [1:1:240:0:0:3:0] Put# [1:1:241:0:0:75:0] Put# [1:1:242:0:0:87:0] Put# [1:1:243:0:0:34:0] Put# [1:1:244:0:0:31:0] Put# [1:1:245:0:0:61:0] Put# [1:1:246:0:0:64:0] Put# [1:1:247:0:0:4:0] Put# [1:1:248:0:0:44:0] Put# [1:1:249:0:0:94:0] Put# [1:1:250:0:0:2:0] Put# [1:1:251:0:0:92:0] Put# [1:1:252:0:0:75:0] Put# [1:1:253:0:0:7:0] Put# [1:1:254:0:0:57:0] Put# [1:1:255:0:0:72:0] Put# [1:1:256:0:0:76:0] Put# [1:1:257:0:0:67:0] Put# [1:1:258:0:0:34:0] Put# [1:1:259:0:0:89:0] Put# [1:1:260:0:0:84:0] Put# [1:1:261:0:0:96:0] Put# [1:1:262:0:0:75:0] Put# [1:1:263:0:0:26:0] Put# [1:1:264:0:0:86:0] Put# [1:1:265:0:0:99:0] Put# [1:1:266:0:0:47:0] Put# [1:1:267:0:0:98:0] Put# [1:1:268:0:0:15:0] Put# [1:1:269:0:0:51:0] Put# [1:1:270:0:0:100:0] Put# [1:1:271:0:0:26:0] Put# [1:1:272:0:0:3:0] Put# [1:1:273:0:0:91:0] Put# [1:1:274:0:0:65:0] Put# [1:1:275:0:0:97:0] Put# [1:1:276:0:0:95:0] Put# [1:1:277:0:0:62:0] Put# [1:1:278:0:0:76:0] Put# [1:1:279:0:0:36:0] Put# [1:1:280:0:0:84:0] Put# [1:1:281:0:0:9:0] Put# [1:1:282:0:0:82:0] Put# [1:1:283:0:0:65:0] Put# [1:1:284:0:0:88:0] Put# [1:1:285:0:0:67:0] Put# [1:1:286:0:0:1:0] Put# [1:1:287:0:0:18:0] Put# [1:1:288:0:0:73:0] Put# [1:1:289:0:0:69:0] Put# [1:1:290:0:0:58:0] Put# [1:1:291:0:0:64:0] Put# 
[1:1:292:0:0:99:0] Put# [1:1:293:0:0:92:0] Put# [1:1:294:0:0:82:0] Put# [1:1:295:0:0:73:0] Put# [1:1:296:0:0:39:0] Put# [1:1:297:0:0:95:0] Put# [1:1:298:0:0:92:0] Put# [1:1:299:0:0:64:0] Put# [1:1:300:0:0:82:0] Put# [1:1:301:0:0:88:0] Put# [1:1:302:0:0:94:0] Put# [1:1:303:0:0:65:0] Put# [1:1:304:0:0:95:0] Put# [1:1:305:0:0:35:0] Put# [1:1:306:0:0:77:0] Put# [1:1:307:0:0:42:0] Put# [1:1:308:0:0:16:0] Put# [1:1:309:0:0:58:0] Put# [1:1:310:0:0:73:0] Put# [1:1:311:0:0:83:0] Put# [1:1:312:0:0:94:0] Put# [1:1:313:0:0:64:0] Put# [1:1:314:0:0:95:0] Put# [1:1:315:0:0:71:0] Put# [1:1:316:0:0:95:0] Put# [1:1:317:0:0:32:0] Put# [1:1:318:0:0:52:0] Put# [1:1:319:0:0:56:0] Put# [1:1:320:0:0:85:0] Put# [1:1:321:0:0:2:0] Put# [1:1:322:0:0:45:0] Put# [1:1:323:0:0:65:0] Put# [1:1:324:0:0:93:0] Put# [1:1:325:0:0:68:0] Put# [1:1:326:0:0:87:0] Put# [1:1:327:0:0:56:0] Put# [1:1:328:0:0:89:0] Put# [1:1:329:0:0:52:0] Put# [1:1:330:0:0:32:0] Put# [1:1:331:0:0:58:0] Put# [1:1:332:0:0:50:0] Put# [1:1:333:0:0:66:0] Put# [1:1:334:0:0:80:0] Put# [1:1:335:0:0:81:0] Put# [1:1:336:0:0:90:0] Put# [1:1:337:0:0:93:0] Put# [1:1:338:0:0:35:0] Put# [1:1:339:0:0:23:0] Put# [1:1:340:0:0:21:0] Put# [1:1:341:0:0:51:0] Put# [1:1:342:0:0:96:0] Put# [1:1:343:0:0:36:0] Put# [1:1:344:0:0:93:0] Put# [1:1:345:0:0:56:0] Put# [1:1:346:0:0:20:0] Put# [1:1:347:0:0:40:0] Put# [1:1:348:0:0:59:0] Put# [1:1:349:0:0:84:0] Put# [1:1:350:0:0:70:0] Put# [1:1:351:0:0:28:0] Put# [1:1:352:0:0:70:0] Put# [1:1:353:0:0:87:0] Put# [1:1:354:0:0:34:0] Put# [1:1:355:0:0:7:0] Put# [1:1:356:0:0:84:0] Put# [1:1:357:0:0:72:0] Put# [1:1:358:0:0:86:0] Put# [1:1:359:0:0:21:0] Put# [1:1:360:0:0:74:0] Put# [1:1:361:0:0:48:0] Put# [1:1:362:0:0:36:0] Put# [1:1:363:0:0:58:0] Put# [1:1:364:0:0:29:0] Put# [1:1:365:0:0:44:0] Put# [1:1:366:0:0:41:0] Put# [1:1:367:0:0:52:0] Put# [1:1:368:0:0:5:0] Put# [1:1:369:0:0:92:0] Put# [1:1:370:0:0:24:0] Put# [1:1:371:0:0:69:0] Put# [1:1:372:0:0:69:0] Put# [1:1:373:0:0:46:0] Put# [1:1:374:0:0:69:0] Put# [1:1:375:0:0:14:0] Put# [1:1:376:0:0:76:0] Put# [1:1:377:0:0:68:0] Put# [1:1:378:0:0:78:0] Put# [1:1:379:0:0:92:0] Put# [1:1:380:0:0:86:0] Put# [1:1:381:0:0:15:0] Put# [1:1:382:0:0:21:0] Put# [1:1:383:0:0:51:0] Put# [1:1:384:0:0:30:0] Put# [1:1:385:0:0:24:0] Put# [1:1:386:0:0:89:0] Put# [1:1:387:0:0:83:0] Put# [1:1:388:0:0:48:0] Put# [1:1:389:0:0:13:0] Put# [1:1:390:0:0:5:0] Put# [1:1:391:0:0:84:0] Put# [1:1:392:0:0:72:0] Put# [1:1:393:0:0:63:0] Put# [1:1:394:0:0:78:0] Put# [1:1:395:0:0:68:0] Put# [1:1:396:0:0:67:0] Put# [1:1:397:0:0:68:0] Put# [1:1:398:0:0:2:0] Put# [1:1:399:0:0:42:0] Put# [1:1:400:0:0:31:0] Put# [1:1:401:0:0:7:0] Put# [1:1:402:0:0:24:0] Put# [1:1:403:0:0:4:0] Put# [1:1:404:0:0:79:0] Put# [1:1:405:0:0:51:0] Put# [1:1:406:0:0:95:0] Put# [1:1:407:0:0:55:0] Put# [1:1:408:0:0:40:0] Put# [1:1:409:0:0:11:0] Put# [1:1:410:0:0:26:0] Put# [1:1:411:0:0:2:0] Put# [1:1:412:0:0:94:0] Put# [1:1:413:0:0:100:0] Put# [1:1:414:0:0:40:0] Put# [1:1:415:0:0:6:0] Put# [1:1:416:0:0:72:0] Put# [1:1:417:0:0:95:0] Put# [1:1:418:0:0:49:0] Put# [1:1:419:0:0:86:0] Put# [1:1:420:0:0:44:0] Put# [1:1:421:0:0:49:0] Put# [1:1:422:0:0:34:0] Put# [1:1:423:0:0:67:0] Put# [1:1:424:0:0:70:0] Put# [1:1:425:0:0:63:0] Put# [1:1:426:0:0:100:0] Put# [1:1:427:0:0:36:0] Put# [1:1:428:0:0:98:0] Put# [1:1:429:0:0:69:0] Put# [1:1:430:0:0:20:0] Put# [1:1:431:0:0:3:0] Put# [1:1:432:0:0:20:0] Put# [1:1:433:0:0:34:0] Put# [1:1:434:0:0:7:0] Put# [1:1:435:0:0:10:0] Put# [1:1:436:0:0:45:0] Put# [1:1:437:0:0:47:0] Put# [1:1:438:0:0:98:0] Put# [1:1:439:0:0:14:0] Put# 
[1:1:440:0:0:98:0] Put# [1:1:441:0:0:42:0] Put# [1:1:442:0:0:39:0] Put# [1:1:443:0:0:37:0] Put# [1:1:444:0:0:83:0] Put# [1:1:445:0:0:34:0] Put# [1:1:446:0:0:67:0] Put# [1:1:447:0:0:17:0] Put# [1:1:448:0:0:58:0] Put# [1:1 ... 4:1259:0:0:66:0] Put# [1:4:1260:0:0:20:0] Put# [1:4:1261:0:0:67:0] Put# [1:4:1262:0:0:92:0] Put# [1:4:1263:0:0:85:0] Put# [1:4:1264:0:0:52:0] Put# [1:4:1265:0:0:16:0] Put# [1:4:1266:0:0:34:0] Put# [1:4:1267:0:0:61:0] Put# [1:4:1268:0:0:39:0] Put# [1:4:1269:0:0:22:0] Put# [1:4:1270:0:0:65:0] Put# [1:4:1271:0:0:28:0] Put# [1:4:1272:0:0:8:0] Put# [1:4:1273:0:0:87:0] Put# [1:4:1274:0:0:8:0] Put# [1:4:1275:0:0:55:0] Put# [1:4:1276:0:0:65:0] Put# [1:4:1277:0:0:68:0] Put# [1:4:1278:0:0:2:0] Put# [1:4:1279:0:0:71:0] Put# [1:4:1280:0:0:95:0] Put# [1:4:1281:0:0:43:0] Put# [1:4:1282:0:0:38:0] Put# [1:4:1283:0:0:50:0] Put# [1:4:1284:0:0:62:0] Put# [1:4:1285:0:0:97:0] Put# [1:4:1286:0:0:28:0] Put# [1:4:1287:0:0:62:0] Put# [1:4:1288:0:0:83:0] Put# [1:4:1289:0:0:3:0] Put# [1:4:1290:0:0:17:0] Put# [1:4:1291:0:0:42:0] Put# [1:4:1292:0:0:52:0] Put# [1:4:1293:0:0:53:0] Put# [1:4:1294:0:0:66:0] Put# [1:4:1295:0:0:3:0] Put# [1:4:1296:0:0:51:0] Put# [1:4:1297:0:0:17:0] Put# [1:4:1298:0:0:97:0] Put# [1:4:1299:0:0:89:0] Put# [1:4:1300:0:0:97:0] Put# [1:4:1301:0:0:5:0] Put# [1:4:1302:0:0:78:0] Put# [1:4:1303:0:0:43:0] Put# [1:4:1304:0:0:71:0] Put# [1:4:1305:0:0:7:0] Put# [1:4:1306:0:0:99:0] Put# [1:4:1307:0:0:1:0] Put# [1:4:1308:0:0:84:0] Put# [1:4:1309:0:0:16:0] Put# [1:4:1310:0:0:42:0] Put# [1:4:1311:0:0:25:0] Put# [1:4:1312:0:0:11:0] Put# [1:4:1313:0:0:6:0] Put# [1:4:1314:0:0:78:0] Put# [1:4:1315:0:0:98:0] Put# [1:4:1316:0:0:72:0] Put# [1:4:1317:0:0:29:0] Put# [1:4:1318:0:0:64:0] Put# [1:4:1319:0:0:47:0] Put# [1:4:1320:0:0:78:0] Put# [1:4:1321:0:0:14:0] Put# [1:4:1322:0:0:76:0] Put# [1:4:1323:0:0:85:0] Put# [1:4:1324:0:0:61:0] Put# [1:4:1325:0:0:33:0] Put# [1:4:1326:0:0:100:0] Put# [1:4:1327:0:0:93:0] Put# [1:4:1328:0:0:72:0] Put# [1:4:1329:0:0:2:0] Put# [1:4:1330:0:0:13:0] Put# [1:4:1331:0:0:71:0] Put# [1:4:1332:0:0:5:0] Put# [1:4:1333:0:0:49:0] Put# [1:4:1334:0:0:7:0] Put# [1:4:1335:0:0:51:0] Put# [1:4:1336:0:0:85:0] Put# [1:4:1337:0:0:61:0] Put# [1:4:1338:0:0:42:0] Put# [1:4:1339:0:0:67:0] Put# [1:4:1340:0:0:65:0] Put# [1:4:1341:0:0:51:0] Put# [1:4:1342:0:0:37:0] Put# [1:4:1343:0:0:46:0] Put# [1:4:1344:0:0:86:0] Put# [1:4:1345:0:0:74:0] Put# [1:4:1346:0:0:77:0] Put# [1:4:1347:0:0:32:0] Put# [1:4:1348:0:0:25:0] Put# [1:4:1349:0:0:43:0] Put# [1:4:1350:0:0:68:0] Put# [1:4:1351:0:0:35:0] Put# [1:4:1352:0:0:66:0] Put# [1:4:1353:0:0:73:0] Put# [1:4:1354:0:0:3:0] Put# [1:4:1355:0:0:28:0] Put# [1:4:1356:0:0:91:0] Put# [1:4:1357:0:0:63:0] Put# [1:4:1358:0:0:93:0] Put# [1:4:1359:0:0:77:0] Put# [1:4:1360:0:0:52:0] Put# [1:4:1361:0:0:89:0] Put# [1:4:1362:0:0:4:0] Put# [1:4:1363:0:0:10:0] Put# [1:4:1364:0:0:19:0] Put# [1:4:1365:0:0:1:0] Put# [1:4:1366:0:0:11:0] Put# [1:4:1367:0:0:63:0] Put# [1:4:1368:0:0:62:0] Put# [1:4:1369:0:0:19:0] Put# [1:4:1370:0:0:57:0] Put# [1:4:1371:0:0:9:0] Put# [1:4:1372:0:0:15:0] Put# [1:4:1373:0:0:58:0] Put# [1:4:1374:0:0:93:0] Put# [1:4:1375:0:0:53:0] Put# [1:4:1376:0:0:65:0] Put# [1:4:1377:0:0:37:0] Put# [1:4:1378:0:0:93:0] Put# [1:4:1379:0:0:76:0] Put# [1:4:1380:0:0:3:0] Put# [1:4:1381:0:0:1:0] Put# [1:4:1382:0:0:52:0] Put# [1:4:1383:0:0:90:0] Put# [1:4:1384:0:0:68:0] Put# [1:4:1385:0:0:55:0] Put# [1:4:1386:0:0:51:0] Put# [1:4:1387:0:0:90:0] Put# [1:4:1388:0:0:83:0] Put# [1:4:1389:0:0:26:0] Put# [1:4:1390:0:0:10:0] Put# [1:4:1391:0:0:96:0] Put# [1:4:1392:0:0:65:0] Put# 
[1:4:1393:0:0:47:0] Put# [1:4:1394:0:0:31:0] Put# [1:4:1395:0:0:70:0] Put# [1:4:1396:0:0:82:0] Put# [1:4:1397:0:0:47:0] Put# [1:4:1398:0:0:94:0] Put# [1:4:1399:0:0:61:0] Put# [1:4:1400:0:0:1:0] Put# [1:4:1401:0:0:76:0] Put# [1:4:1402:0:0:94:0] Put# [1:4:1403:0:0:75:0] Put# [1:4:1404:0:0:76:0] Put# [1:4:1405:0:0:29:0] Put# [1:4:1406:0:0:60:0] Put# [1:4:1407:0:0:4:0] Put# [1:4:1408:0:0:51:0] Put# [1:4:1409:0:0:87:0] Put# [1:4:1410:0:0:95:0] Put# [1:4:1411:0:0:15:0] Put# [1:4:1412:0:0:91:0] Put# [1:4:1413:0:0:23:0] Put# [1:4:1414:0:0:60:0] Put# [1:4:1415:0:0:57:0] Put# [1:4:1416:0:0:46:0] Put# [1:4:1417:0:0:1:0] Put# [1:4:1418:0:0:30:0] Put# [1:4:1419:0:0:38:0] Put# [1:4:1420:0:0:21:0] Put# [1:4:1421:0:0:80:0] Put# [1:4:1422:0:0:73:0] Put# [1:4:1423:0:0:47:0] Put# [1:4:1424:0:0:43:0] Put# [1:4:1425:0:0:11:0] Put# [1:4:1426:0:0:41:0] Put# [1:4:1427:0:0:92:0] Put# [1:4:1428:0:0:87:0] Put# [1:4:1429:0:0:92:0] Put# [1:4:1430:0:0:81:0] Put# [1:4:1431:0:0:24:0] Put# [1:4:1432:0:0:69:0] Put# [1:4:1433:0:0:40:0] Put# [1:4:1434:0:0:44:0] Put# [1:4:1435:0:0:87:0] Put# [1:4:1436:0:0:75:0] Put# [1:4:1437:0:0:41:0] Put# [1:4:1438:0:0:9:0] Put# [1:4:1439:0:0:51:0] Put# [1:4:1440:0:0:76:0] Put# [1:4:1441:0:0:49:0] Put# [1:4:1442:0:0:35:0] Put# [1:4:1443:0:0:23:0] Put# [1:4:1444:0:0:56:0] Put# [1:4:1445:0:0:6:0] Put# [1:4:1446:0:0:37:0] Put# [1:4:1447:0:0:33:0] Put# [1:4:1448:0:0:52:0] Put# [1:4:1449:0:0:35:0] Put# [1:4:1450:0:0:22:0] Put# [1:4:1451:0:0:4:0] Put# [1:4:1452:0:0:95:0] Put# [1:4:1453:0:0:18:0] Put# [1:4:1454:0:0:72:0] Put# [1:4:1455:0:0:99:0] Put# [1:4:1456:0:0:39:0] Put# [1:4:1457:0:0:99:0] Put# [1:4:1458:0:0:76:0] Put# [1:4:1459:0:0:91:0] Put# [1:4:1460:0:0:100:0] Put# [1:4:1461:0:0:87:0] Put# [1:4:1462:0:0:51:0] Put# [1:4:1463:0:0:81:0] Put# [1:4:1464:0:0:77:0] Put# [1:4:1465:0:0:76:0] Put# [1:4:1466:0:0:93:0] Put# [1:4:1467:0:0:4:0] Put# [1:4:1468:0:0:54:0] Put# [1:4:1469:0:0:64:0] Put# [1:4:1470:0:0:14:0] Put# [1:4:1471:0:0:36:0] Put# [1:4:1472:0:0:30:0] Put# [1:4:1473:0:0:9:0] Put# [1:4:1474:0:0:2:0] Put# [1:4:1475:0:0:48:0] Put# [1:4:1476:0:0:27:0] Put# [1:4:1477:0:0:28:0] Put# [1:4:1478:0:0:98:0] Put# [1:4:1479:0:0:37:0] Put# [1:4:1480:0:0:22:0] Put# [1:4:1481:0:0:90:0] Put# [1:4:1482:0:0:27:0] Put# [1:4:1483:0:0:2:0] Put# [1:4:1484:0:0:1:0] Put# [1:4:1485:0:0:80:0] Put# [1:4:1486:0:0:93:0] Put# [1:4:1487:0:0:65:0] Put# [1:4:1488:0:0:54:0] Put# [1:4:1489:0:0:5:0] Put# [1:4:1490:0:0:63:0] Put# [1:4:1491:0:0:15:0] Put# [1:4:1492:0:0:41:0] Put# [1:4:1493:0:0:70:0] Put# [1:4:1494:0:0:44:0] Put# [1:4:1495:0:0:59:0] Put# [1:4:1496:0:0:19:0] Put# [1:4:1497:0:0:80:0] Put# [1:4:1498:0:0:79:0] Put# [1:4:1499:0:0:85:0] Put# [1:4:1500:0:0:87:0] Put# [1:4:1501:0:0:70:0] Put# [1:4:1502:0:0:22:0] Put# [1:4:1503:0:0:76:0] Put# [1:4:1504:0:0:67:0] Put# [1:4:1505:0:0:88:0] Put# [1:4:1506:0:0:12:0] Put# [1:4:1507:0:0:12:0] Put# [1:4:1508:0:0:27:0] Put# [1:4:1509:0:0:3:0] Put# [1:4:1510:0:0:41:0] Put# [1:4:1511:0:0:53:0] Put# [1:4:1512:0:0:15:0] Put# [1:4:1513:0:0:83:0] Put# [1:4:1514:0:0:19:0] Put# [1:4:1515:0:0:94:0] Put# [1:4:1516:0:0:86:0] Put# [1:4:1517:0:0:94:0] Put# [1:4:1518:0:0:77:0] Put# [1:4:1519:0:0:92:0] Put# [1:4:1520:0:0:1:0] Put# [1:4:1521:0:0:59:0] Put# [1:4:1522:0:0:52:0] Put# [1:4:1523:0:0:59:0] Put# [1:4:1524:0:0:41:0] Put# [1:4:1525:0:0:55:0] Put# [1:4:1526:0:0:32:0] Put# [1:4:1527:0:0:53:0] Put# [1:4:1528:0:0:43:0] Put# [1:4:1529:0:0:85:0] Put# [1:4:1530:0:0:59:0] Put# [1:4:1531:0:0:7:0] Put# [1:4:1532:0:0:91:0] Put# [1:4:1533:0:0:93:0] Put# [1:4:1534:0:0:38:0] Put# 
[1:4:1535:0:0:37:0] Put# [1:4:1536:0:0:91:0] Put# [1:4:1537:0:0:92:0] Put# [1:4:1538:0:0:61:0] Put# [1:4:1539:0:0:38:0] Put# [1:4:1540:0:0:1:0] Put# [1:4:1541:0:0:34:0] Put# [1:4:1542:0:0:12:0] Put# [1:4:1543:0:0:43:0] Put# [1:4:1544:0:0:67:0] Put# [1:4:1545:0:0:33:0] Put# [1:4:1546:0:0:1:0] Put# [1:4:1547:0:0:34:0] Put# [1:4:1548:0:0:100:0] Put# [1:4:1549:0:0:24:0] Put# [1:4:1550:0:0:72:0] Put# [1:4:1551:0:0:64:0] Put# [1:4:1552:0:0:11:0] Put# [1:4:1553:0:0:45:0] Put# [1:4:1554:0:0:11:0] Put# [1:4:1555:0:0:68:0] Put# [1:4:1556:0:0:97:0] Put# [1:4:1557:0:0:19:0] Put# [1:4:1558:0:0:27:0] Put# [1:4:1559:0:0:33:0] Put# [1:4:1560:0:0:44:0] Put# [1:4:1561:0:0:51:0] Put# [1:4:1562:0:0:33:0] Put# [1:4:1563:0:0:43:0] Put# [1:4:1564:0:0:63:0] Put# [1:4:1565:0:0:83:0] Put# [1:4:1566:0:0:79:0] Put# [1:4:1567:0:0:44:0] Put# [1:4:1568:0:0:97:0] Put# [1:4:1569:0:0:62:0] Put# [1:4:1570:0:0:88:0] Put# [1:4:1571:0:0:16:0] Put# [1:4:1572:0:0:84:0] Put# [1:4:1573:0:0:81:0] Put# [1:4:1574:0:0:98:0] Put# [1:4:1575:0:0:42:0] Put# [1:4:1576:0:0:57:0] Put# [1:4:1577:0:0:26:0] Put# [1:4:1578:0:0:13:0] Put# [1:4:1579:0:0:19:0] Put# [1:4:1580:0:0:92:0] Put# [1:4:1581:0:0:94:0] Put# [1:4:1582:0:0:8:0] Put# [1:4:1583:0:0:2:0] Put# [1:4:1584:0:0:40:0] Put# [1:4:1585:0:0:38:0] Put# [1:4:1586:0:0:43:0] Put# [1:4:1587:0:0:28:0] Put# [1:4:1588:0:0:57:0] Put# [1:4:1589:0:0:11:0] Put# [1:4:1590:0:0:12:0] Put# [1:4:1591:0:0:45:0] Put# [1:4:1592:0:0:2:0] Put# [1:4:1593:0:0:58:0] Put# [1:4:1594:0:0:62:0] Put# [1:4:1595:0:0:3:0] Put# [1:4:1596:0:0:44:0] Put# [1:4:1597:0:0:71:0] Put# [1:4:1598:0:0:93:0] Put# [1:4:1599:0:0:98:0] Put# [1:4:1600:0:0:98:0] Put# [1:4:1601:0:0:61:0] Put# [1:4:1602:0:0:23:0] Put# [1:4:1603:0:0:67:0] Put# [1:4:1604:0:0:51:0] Put# [1:4:1605:0:0:81:0] Put# [1:4:1606:0:0:17:0] Put# [1:4:1607:0:0:42:0] Put# [1:4:1608:0:0:32:0] Put# [1:4:1609:0:0:37:0] Put# [1:4:1610:0:0:89:0] Put# [1:4:1611:0:0:39:0] Put# [1:4:1612:0:0:48:0] Put# [1:4:1613:0:0:47:0] Put# [1:4:1614:0:0:80:0] Put# [1:4:1615:0:0:87:0] Put# [1:4:1616:0:0:32:0] Put# [1:4:1617:0:0:25:0] Put# [1:4:1618:0:0:15:0] Put# [1:4:1619:0:0:50:0] Put# [1:4:1620:0:0:23:0] Put# [1:4:1621:0:0:64:0] Put# [1:4:1622:0:0:64:0] Put# [1:4:1623:0:0:38:0] Put# [1:4:1624:0:0:64:0] Put# [1:4:1625:0:0:20:0] Put# [1:4:1626:0:0:40:0] Put# [1:4:1627:0:0:1:0] Put# [1:4:1628:0:0:93:0] Put# [1:4:1629:0:0:19:0] Put# [1:4:1630:0:0:92:0] Put# [1:4:1631:0:0:98:0] Put# [1:4:1632:0:0:83:0] Put# [1:4:1633:0:0:60:0] Put# [1:4:1634:0:0:57:0] Put# [1:4:1635:0:0:15:0] Put# [1:4:1636:0:0:67:0] Put# [1:4:1637:0:0:45:0] Put# [1:4:1638:0:0:61:0] Put# [1:4:1639:0:0:5:0] Put# [1:4:1640:0:0:60:0] Put# [1:4:1641:0:0:14:0] Put# [1:4:1642:0:0:21:0] Put# [1:4:1643:0:0:36:0] Put# [1:4:1644:0:0:40:0] Put# [1:4:1645:0:0:56:0] Put# [1:4:1646:0:0:75:0] Put# [1:4:1647:0:0:78:0] Put# [1:4:1648:0:0:100:0] Put# [1:4:1649:0:0:41:0] Put# [1:4:1650:0:0:91:0] Put# [1:4:1651:0:0:71:0] Put# [1:4:1652:0:0:30:0] Put# [1:4:1653:0:0:73:0] Put# [1:4:1654:0:0:16:0] Put# [1:4:1655:0:0:95:0] Put# [1:4:1656:0:0:50:0] Put# [1:4:1657:0:0:20:0] Put# [1:4:1658:0:0:70:0] Put# [1:4:1659:0:0:28:0] Put# [1:4:1660:0:0:89:0] Put# [1:4:1661:0:0:60:0] Put# [1:4:1662:0:0:12:0] Put# [1:4:1663:0:0:68:0] Put# [1:4:1664:0:0:91:0] Put# [1:4:1665:0:0:58:0] Put# [1:4:1666:0:0:1:0] Put# [1:4:1667:0:0:46:0] Put# [1:4:1668:0:0:34:0] Put# [1:4:1669:0:0:8:0] Put# [1:4:1670:0:0:2:0] Put# [1:4:1671:0:0:86:0] Put# [1:4:1672:0:0:52:0] Put# [1:4:1673:0:0:87:0] Put# [1:4:1674:0:0:25:0] Put# [1:4:1675:0:0:84:0] Put# [1:4:1676:0:0:5:0] Put# 
[1:4:1677:0:0:98:0] Put# [1:4:1678:0:0:8:0] Put# [1:4:1679:0:0:66:0] Put# [1:4:1680:0:0:96:0] Put# [1:4:1681:0:0:32:0] Put# [1:4:1682:0:0:21:0] Put# [1:4:1683:0:0:3:0] Put# [1:4:1684:0:0:50:0] Put# [1:4:1685:0:0:25:0] Put# [1:4:1686:0:0:84:0] Put# [1:4:1687:0:0:12:0] Put# [1:4:1688:0:0:99:0] Put# [1:4:1689:0:0:87:0] Put# [1:4:1690:0:0:12:0] Put# [1:4:1691:0:0:79:0] Put# [1:4:1692:0:0:40:0] Put# [1:4:1693:0:0:17:0] Put# [1:4:1694:0:0:54:0] Put# [1:4:1695:0:0:80:0] Put# [1:4:1696:0:0:39:0] Put# [1:4:1697:0:0:86:0] Put# [1:4:1698:0:0:85:0] Put# [1:4:1699:0:0:91:0] Put# [1:4:1700:0:0:74:0] Put# [1:4:1701:0:0:94:0] Put# [1:4:1702:0:0:49:0] Put# [1:4:1703:0:0:39:0] Put# [1:4:1704:0:0:19:0] Put# [1:4:1705:0:0:66:0] Put# [1:4:1706:0:0:84:0] Put# [1:4:1707:0:0:44:0] Put# [1:4:1708:0:0:19:0] Put# [1:4:1709:0:0:10:0] Put# [1:4:1710:0:0:52:0] Put# [1:4:1711:0:0:28:0] Put# [1:4:1712:0:0:83:0] Put# [1:4:1713:0:0:81:0] Put# [1:4:1714:0:0:91:0] Put# [1:4:1715:0:0:80:0] Put# [1:4:1716:0:0:29:0] Put# [1:4:1717:0:0:40:0] Put# [1:4:1718:0:0:80:0] Put# [1:4:1719:0:0:88:0] Put# [1:4:1720:0:0:3:0] Put# [1:4:1721:0:0:6:0] Put# [1:4:1722:0:0:27:0] Put# [1:4:1723:0:0:45:0] Put# [1:4:1724:0:0:88:0] Put# [1:4:1725:0:0:31:0] Put# [1:4:1726:0:0:23:0] Put# [1:4:1727:0:0:56:0] Reassign# 3 -- VSlotId { NodeId: 5 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 4 VDiskKind: "Default" FailDomainIdx: 4 VDiskMetrics { SatisfactionRank: 25 VSlotId { NodeId: 5 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green } Status: "READY" Ready: true Put# [1:4:1728:0:0:78:0] >> TSubscriberTest::SyncWithOutdatedReplica >> TSubscriberTest::NotifyUpdate [GOOD] >> TSubscriberTest::InvalidNotification [GOOD] >> TSubscriberTest::SyncPartial >> BsControllerTest::TestLocalSelfHeal [GOOD] >> TSubscriberTest::Sync [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: 2024-11-21T23:13:09.406793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:13:09.409990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:13:09.410145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001003/r3tmp/tmphQdvaH/pdisk_1.dat 2024-11-21T23:13:09.946824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:13:09.955645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:13:09.992170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:13:09.993185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:13:09.997121Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2024-11-21T23:13:09.997200Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T23:13:10.023969Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:10.027322Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2024-11-21T23:13:10.084512Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:307:2347] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T23:13:10.084699Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2024-11-21T23:13:10.084849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:10.084921Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T23:13:10.084966Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:13:10.085020Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T23:13:10.085465Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:13:10.085612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T23:13:10.085921Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T23:13:10.085967Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T23:13:10.086005Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T23:13:10.086049Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T23:13:10.086270Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2024-11-21T23:13:10.099177Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2024-11-21T23:13:10.099267Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:307:2347]) 2024-11-21T23:13:10.099412Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T23:13:10.100023Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2024-11-21T23:13:10.100120Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Execute 2024-11-21T23:13:10.100163Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:13:10.100241Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Complete 2024-11-21T23:13:10.100449Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443335680 } 2024-11-21T23:13:10.100537Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2024-11-21T23:13:10.100580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:10.100743Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2024-11-21T23:13:10.100797Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T23:13:10.100830Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:13:10.100985Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T23:13:10.101020Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T23:13:10.101071Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T23:13:10.101107Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T23:13:10.112534Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2024-11-21T23:13:10.112613Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T23:13:10.223898Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2024-11-21T23:13:10.224009Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T23:13:10.224425Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2024-11-21T23:13:10.224779Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2024-11-21T23:13:10.224843Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2024-11-21T23:13:10.225759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
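The Hive trace above registers a node and walks its VolatileState through Unknown -> Disconnected -> Connecting -> Connected. A minimal C++ sketch of that transition order exactly as printed in the log; the enum and helper below are illustrative names for this annotation, not YDB's actual types:

#include <cassert>

// Illustrative only: states named after the VolatileState values printed in the Hive trace.
enum class ENodeVolatileState { Unknown, Disconnected, Connecting, Connected };

// True for exactly the transitions observed above: Unknown -> Disconnected and
// Disconnected -> Connecting during TTxRegisterNode, Connecting -> Connected once
// TEvLocal::TEvStatus is handled in TTxStatus.
bool IsObservedTransition(ENodeVolatileState from, ENodeVolatileState to) {
    switch (from) {
        case ENodeVolatileState::Unknown:      return to == ENodeVolatileState::Disconnected;
        case ENodeVolatileState::Disconnected: return to == ENodeVolatileState::Connecting;
        case ENodeVolatileState::Connecting:   return to == ENodeVolatileState::Connected;
        case ENodeVolatileState::Connected:    return false; // no further change in this trace
    }
    return false;
}

int main() {
    assert(IsObservedTransition(ENodeVolatileState::Unknown, ENodeVolatileState::Disconnected));
    assert(IsObservedTransition(ENodeVolatileState::Disconnected, ENodeVolatileState::Connecting));
    assert(IsObservedTransition(ENodeVolatileState::Connecting, ENodeVolatileState::Connected));
    assert(!IsObservedTransition(ENodeVolatileState::Connected, ENodeVolatileState::Unknown));
    return 0;
}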
2024-11-21T23:13:10.226969Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T23:13:10.227083Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T23:13:10.227120Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2024-11-21T23:13:10.227154Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2024-11-21T23:13:10.232023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:10.232177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T23:13:10.233341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T23:13:10.240275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:13:10.241508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:13:10.241589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:13:10.255961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2024-11-21T23:13:10.274321Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2024-11-21T23:13:10.285870Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2024-11-21T23:13:10.285996Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T23:13:10.286194Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2024-11-21T23:13:10.286237Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2024-11-21T23:13:10.286295Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T23:13:10.286465Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2024-11-21T23:13:10.287079Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T23:13:10.287217Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
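Each record in this datashard/schemeshard trace shares one prefix: an ISO-8601 timestamp, "node <id>", a ":COMPONENT SEVERITY:" pair, then the message body. A self-contained sketch that splits that prefix out of a line like the ones above; the struct and function names are made up for this example, and the BS_NODE trace further below prints a slightly different prefix that would not match:

#include <iostream>
#include <optional>
#include <sstream>
#include <string>

struct TLogRecord {
    std::string Timestamp;   // e.g. 2024-11-21T23:13:10.287959Z
    int NodeId = 0;          // e.g. 1
    std::string Component;   // e.g. HIVE
    std::string Severity;    // e.g. DEBUG
    std::string Message;     // rest of the line
};

// Parses "<timestamp> node <N> :<COMPONENT> <SEVERITY>: <message>".
// Returns nullopt if the line does not follow that shape.
std::optional<TLogRecord> ParsePrefix(const std::string& line) {
    std::istringstream in(line);
    TLogRecord rec;
    std::string nodeWord, componentWithColon;
    if (!(in >> rec.Timestamp >> nodeWord >> rec.NodeId >> componentWithColon >> rec.Severity)) {
        return std::nullopt;
    }
    if (nodeWord != "node" || componentWithColon.empty() || componentWithColon.front() != ':') {
        return std::nullopt;
    }
    rec.Component = componentWithColon.substr(1);
    if (!rec.Severity.empty() && rec.Severity.back() == ':') {
        rec.Severity.pop_back();
    }
    std::getline(in >> std::ws, rec.Message);
    return rec;
}

int main() {
    auto rec = ParsePrefix(
        "2024-11-21T23:13:10.287959Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033");
    if (rec) {
        std::cout << rec->Component << "/" << rec->Severity << " on node " << rec->NodeId
                  << ": " << rec->Message << "\n";
    }
    return 0;
}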
2024-11-21T23:13:10.287959Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T23:13:10.288338Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2024-11-21T23:13:10.288458Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004596608}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2024-11-21T23:13:10.288534Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004596608}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2024-11-21T23:13:10.288735Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004596608}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2024-11-21T23:13:10.288808Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004596608}: tablet 72075186224037888 channel 2 assigned to group 2181038080 ... 2:4 2024-11-21T23:13:27.431935Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715667 at tablet 72075186224037889 2024-11-21T23:13:27.432139Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:13:27.432423Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:13:27.432530Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:13:27.432566Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:13:27.432608Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2024-11-21T23:13:27.447245Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:13:27.447381Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:13:27.449037Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 HANDLE EvProposeTransaction marker# C0 2024-11-21T23:13:27.449121Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 step# 32501 Status# 16 SEND to# [2:379:2374] Proxy marker# C1 2024-11-21T23:13:27.563340Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715667 has been planned 2024-11-21T23:13:27.563457Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T23:13:27.563510Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 2024-11-21T23:13:27.563855Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2024-11-21T23:13:27.564295Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715667 marker# C2 2024-11-21T23:13:27.564414Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 stepId# 33000 Status# 17 SEND EvProposeTransactionStatus to# [2:379:2374] Proxy 2024-11-21T23:13:27.564916Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, 
transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:13:27.565461Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715667 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715667 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T23:13:27.565517Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:13:27.565811Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:13:27.565867Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:13:27.565919Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715667] in PlanQueue unit at 72075186224037889 2024-11-21T23:13:27.566117Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715667 keys extracted: 0 2024-11-21T23:13:27.566235Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:13:27.566371Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:13:27.566451Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2024-11-21T23:13:27.566845Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:13:27.568784Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 33000 txid# 281474976715667} 2024-11-21T23:13:27.568848Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2024-11-21T23:13:27.568903Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:13:27.569163Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T23:13:27.569248Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T23:13:27.569284Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2024-11-21T23:13:27.569327Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 acknowledged 2024-11-21T23:13:27.569386Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 acknowledged 2024-11-21T23:13:27.569728Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:13:27.569787Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715667] from 72075186224037889 at tablet 72075186224037889 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:13:27.569858Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715667 state PreOffline TxInFly 0 2024-11-21T23:13:27.569951Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:13:27.570131Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715667, done: 0, blocked: 1 2024-11-21T23:13:27.573736Z node 2 :TX_DATASHARD 
DEBUG: Handle TEvSchemaChangedResult 281474976715667 datashard 72075186224037889 state PreOffline 2024-11-21T23:13:27.573815Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T23:13:27.574367Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2024-11-21T23:13:27.574514Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715667, publications: 2, subscribers: 1 2024-11-21T23:13:27.575435Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715667, subscribers: 1 2024-11-21T23:13:27.575969Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T23:13:27.611578Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.611 INFO ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2024-11-21T23:13:27.611780Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.611 INFO ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2024-11-21T23:13:27.611888Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.611 INFO ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [core exec] yql_execution.cpp:59: Begin, root #43 2024-11-21T23:13:27.612004Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.611 INFO ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2024-11-21T23:13:27.612076Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.612 TRACE ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [core exec] yql_execution.cpp:387: {0}, callable #43 2024-11-21T23:13:27.612176Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.612 INFO ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [core exec] yql_execution.cpp:577: Node #43 finished execution 2024-11-21T23:13:27.612301Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.612 INFO ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2024-11-21T23:13:27.612381Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.612 INFO ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2024-11-21T23:13:27.612468Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.612 INFO ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) 
[core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2024-11-21T23:13:27.612724Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.612 NOTE ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2024-11-21T23:13:27.612796Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.612 NOTE ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2024-11-21T23:13:27.612852Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=ZjlmMTdlNzAtNWJjZjA5Y2QtODNjMjY2NjktNWE3MjUwOWU= 2024-11-21 23:13:27.612 NOTE ydb-core-tx-datashard-ut_minstep(pid=222680, tid=0x00007FBBF4A14B80) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2024-11-21T23:13:27.626590Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T23:13:27.626894Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2024-11-21T23:13:27.629069Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T23:13:27.630049Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2024-11-21T23:13:27.631090Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2024-11-21T23:13:27.631164Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2024-11-21T23:13:27.631294Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2024-11-21T23:13:27.631469Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2024-11-21T23:13:27.631623Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2024-11-21T23:13:28.499948Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:28.502578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T23:13:28.502705Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T23:13:28.502746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T23:13:28.502821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T23:13:28.502923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T23:13:28.502983Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.503057Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T23:13:28.503102Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.503580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path 
PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T23:13:28.503662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:33:2064] 2024-11-21T23:13:28.503715Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Update to strong state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::NotifyDelete >> TSubscriberTest::SyncPartial [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2024-11-21T23:13:28.692462Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:28.694150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T23:13:28.694227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T23:13:28.694266Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T23:13:28.694333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T23:13:28.694454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T23:13:28.694504Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.694574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T23:13:28.694613Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.694760Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:31:2063] 2024-11-21T23:13:28.694803Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:32:2064][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:31:2063] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2024-11-21T23:13:28.931663Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:28.933268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2024-11-21T23:13:28.933357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2024-11-21T23:13:28.933406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2024-11-21T23:13:28.933468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:34:2065] 2024-11-21T23:13:28.933532Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:35:2065] 2024-11-21T23:13:28.933586Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.933704Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:36:2065] 2024-11-21T23:13:28.933750Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.933834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T23:13:28.933907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2065], cookie# 1 2024-11-21T23:13:28.933958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2024-11-21T23:13:28.933997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2024-11-21T23:13:28.934035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2024-11-21T23:13:28.934082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T23:13:28.934106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T23:13:28.934144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:34:2065], cookie# 1 
2024-11-21T23:13:28.934171Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:13:28.934205Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:35:2065], cookie# 1 2024-11-21T23:13:28.934231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:13:28.934306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2065], cookie# 1 2024-11-21T23:13:28.934333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Unexpected sync response: sender# [1:36:2065], cookie# 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2024-11-21T23:13:28.886831Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:28.888569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T23:13:28.888667Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2024-11-21T23:13:28.888733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2024-11-21T23:13:28.888800Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:34:2065] 2024-11-21T23:13:28.888862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2024-11-21T23:13:28.888923Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.889057Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2065] 2024-11-21T23:13:28.889117Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.889214Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T23:13:28.889313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2065], cookie# 1 
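The subscriber sync above queries three replicas and logs "Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1" after the first reply, "Sync is done ... successes# 2 ... partial# 0" after the second, and treats the late third reply as "Unexpected sync response". A minimal sketch of that majority rule as the counters suggest it, assuming half = size / 2 and completion once successes exceed half; the failure/partial branch is a guess from the partial# field, not taken from the subscriber source:

#include <cassert>

// Counters mirroring the fields the subscriber prints: size#, half#, successes#, faulires#.
struct TSyncState {
    int Size;
    int Half;                 // size / 2, i.e. 1 for the three replicas in this trace
    int Successes = 0;
    int Failures = 0;         // printed as "faulires#" in the trace
    explicit TSyncState(int size) : Size(size), Half(size / 2) {}
    bool Done() const { return Successes > Half || Failures > Half; }  // failure branch assumed
    bool Partial() const { return Done() && Failures > 0; }
};

int main() {
    TSyncState s(3);
    s.Successes++;                       // first TEvSyncVersionResponse
    assert(!s.Done());                   // "Sync is in progress: ... successes# 1"
    s.Successes++;                       // second response reaches the majority
    assert(s.Done() && !s.Partial());    // "Sync is done: ... successes# 2, ... partial# 0"
    // A third response arriving after completion is the "Unexpected sync response" case above.
    return 0;
}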
2024-11-21T23:13:28.889357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2024-11-21T23:13:28.889383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2024-11-21T23:13:28.889438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2024-11-21T23:13:28.889483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T23:13:28.889509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T23:13:28.889556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:34:2065], cookie# 1 2024-11-21T23:13:28.889613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:13:28.889670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:35:2065], cookie# 1 2024-11-21T23:13:28.889702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:13:28.889789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2065], cookie# 1 2024-11-21T23:13:28.889815Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Unexpected sync response: sender# [1:36:2065], cookie# 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2024-11-21T23:13:26.285783Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T23:13:26.285841Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T23:13:26.285922Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T23:13:26.285945Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T23:13:26.286040Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T23:13:26.286063Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T23:13:26.286102Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T23:13:26.286122Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T23:13:26.286166Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T23:13:26.286190Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T23:13:26.286236Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T23:13:26.286268Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T23:13:26.286311Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T23:13:26.286330Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T23:13:26.286365Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T23:13:26.286406Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T23:13:26.286471Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T23:13:26.286494Z 9 
00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T23:13:26.286542Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T23:13:26.286563Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T23:13:26.286622Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T23:13:26.286644Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T23:13:26.286684Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T23:13:26.286703Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T23:13:26.286764Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T23:13:26.286788Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T23:13:26.286833Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T23:13:26.286852Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T23:13:26.286886Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T23:13:26.286906Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T23:13:26.286944Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T23:13:26.286963Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T23:13:26.287023Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T23:13:26.287042Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T23:13:26.287082Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T23:13:26.287102Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T23:13:26.287135Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T23:13:26.287154Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T23:13:26.287193Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T23:13:26.287212Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T23:13:26.287246Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T23:13:26.287271Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T23:13:26.287307Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T23:13:26.287329Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T23:13:26.287381Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T23:13:26.287407Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T23:13:26.287440Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T23:13:26.287459Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T23:13:26.287507Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T23:13:26.287532Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T23:13:26.287582Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T23:13:26.287602Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T23:13:26.287654Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T23:13:26.287691Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T23:13:26.287732Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T23:13:26.287751Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T23:13:26.287786Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T23:13:26.287810Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T23:13:26.287854Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T23:13:26.287872Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T23:13:26.287913Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T23:13:26.287932Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T23:13:26.287962Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] 
Bootstrap 2024-11-21T23:13:26.287981Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T23:13:26.288020Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T23:13:26.288040Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T23:13:26.288080Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T23:13:26.288098Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T23:13:26.288132Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2024-11-21T23:13:26.288150Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T23:13:26.288189Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T23:13:26.288208Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T23:13:26.306198Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T23:13:26.307682Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T23:13:26.307754Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T23:13:26.307811Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T23:13:26.307849Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2024-11-21T23:13:26.307884Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2024-11-21T23:13:26.307943Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2024-11-21T23:13:26.307983Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T23:13:26.308020Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T23:13:26.308063Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2024-11-21T23:13:26.308100Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T23:13:26.308141Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T23:13:26.308178Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T23:13:26.308215Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T23:13:26.308255Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T23:13:26.308295Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# 
[16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T23:13:26.308336Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T23:13:26.308408Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2024-11-21T23:13:26.308445Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T23:13:26.308479Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T23:13:26.308521Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T23:13:26.308560Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T23:13:26.308595Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T23:13:26.308630Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T23:13:26.308673Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2024-11-21T23:13:26.308726Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2024-11-21T23:13:26.308777Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T23:13:26.308844Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T23:13:26.309360Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2024-11-21T23:13:26.309411Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T23:13:26.309447Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T23:13:26.309485Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T23:13:26.309523Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T23:13:26.309620Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T23:13:26.309658Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# 
ERROR ClientId# [35:2747:41 ... 0.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T23:13:28.270844Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000004f:1:2:0:0] -> [8000004f:2:2:0:0] 2024-11-21T23:13:28.270961Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T23:13:28.271013Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000004f:1:2:1:0] -> [8000004f:2:2:1:0] 2024-11-21T23:13:28.271150Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:28.271219Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000003f:1:1:0:0] -> [8000003f:2:1:0:0] 2024-11-21T23:13:28.271303Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T23:13:28.271346Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000003f:1:2:2:0] -> [8000003f:2:2:2:0] 2024-11-21T23:13:28.271436Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T23:13:28.271486Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000003f:1:1:1:0] -> [8000003f:2:1:1:0] 2024-11-21T23:13:28.271582Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T23:13:28.271624Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000003f:1:0:0:0] -> [8000003f:2:0:0:0] 2024-11-21T23:13:28.271714Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:28.271764Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000003f:1:1:2:0] -> [8000003f:2:1:2:0] 2024-11-21T23:13:28.271860Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T23:13:28.271914Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000003f:1:0:1:0] -> [8000003f:2:0:1:0] 2024-11-21T23:13:28.272031Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.272066Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000003f:2:0:2:0] PDiskId# 1001 VSlotId# 1009 created 2024-11-21T23:13:28.272122Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000003f:2:0:2:0] status changed to INIT_PENDING 2024-11-21T23:13:28.272220Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T23:13:28.272270Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000003f:1:2:0:0] -> [8000003f:2:2:0:0] 2024-11-21T23:13:28.272353Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T23:13:28.272419Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000003f:1:2:1:0] -> [8000003f:2:2:1:0] 2024-11-21T23:13:28.272561Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:28.272613Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000002f:1:1:0:0] -> [8000002f:2:1:0:0] 2024-11-21T23:13:28.272712Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T23:13:28.272760Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000002f:1:2:2:0] -> [8000002f:2:2:2:0] 2024-11-21T23:13:28.272859Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T23:13:28.272905Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000002f:1:1:1:0] -> [8000002f:2:1:1:0] 2024-11-21T23:13:28.272990Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T23:13:28.273031Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000002f:1:0:0:0] -> [8000002f:2:0:0:0] 2024-11-21T23:13:28.273123Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:28.273170Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000002f:1:1:2:0] -> [8000002f:2:1:2:0] 2024-11-21T23:13:28.273251Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] 
NodeServiceSetUpdate 2024-11-21T23:13:28.273295Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000002f:1:0:1:0] -> [8000002f:2:0:1:0] 2024-11-21T23:13:28.273407Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.273442Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000002f:2:0:2:0] PDiskId# 1002 VSlotId# 1009 created 2024-11-21T23:13:28.273531Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000002f:2:0:2:0] status changed to INIT_PENDING 2024-11-21T23:13:28.273637Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T23:13:28.273689Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000002f:1:2:0:0] -> [8000002f:2:2:0:0] 2024-11-21T23:13:28.273771Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T23:13:28.273811Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000002f:1:2:1:0] -> [8000002f:2:2:1:0] 2024-11-21T23:13:28.273958Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:28.274006Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000001f:1:1:0:0] -> [8000001f:2:1:0:0] 2024-11-21T23:13:28.274095Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T23:13:28.274136Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000001f:1:2:2:0] -> [8000001f:2:2:2:0] 2024-11-21T23:13:28.274254Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T23:13:28.274302Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000001f:1:1:1:0] -> [8000001f:2:1:1:0] 2024-11-21T23:13:28.274407Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T23:13:28.274449Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000001f:1:0:0:0] -> [8000001f:2:0:0:0] 2024-11-21T23:13:28.274546Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:28.274593Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000001f:1:1:2:0] -> [8000001f:2:1:2:0] 2024-11-21T23:13:28.274674Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T23:13:28.274731Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000001f:1:0:1:0] -> [8000001f:2:0:1:0] 2024-11-21T23:13:28.274848Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.274885Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000001f:2:0:2:0] PDiskId# 1000 VSlotId# 1010 created 2024-11-21T23:13:28.274941Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000001f:2:0:2:0] status changed to INIT_PENDING 2024-11-21T23:13:28.275049Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T23:13:28.275104Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000001f:1:2:0:0] -> [8000001f:2:2:0:0] 2024-11-21T23:13:28.275191Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T23:13:28.275234Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000001f:1:2:1:0] -> [8000001f:2:2:1:0] 2024-11-21T23:13:28.275369Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2024-11-21T23:13:28.275417Z 18 00h05m00.102048s :BS_NODE DEBUG: [18] VDiskId# [8000000f:1:1:0:0] -> [8000000f:2:1:0:0] 2024-11-21T23:13:28.275499Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2024-11-21T23:13:28.275544Z 36 00h05m00.102048s :BS_NODE DEBUG: [36] VDiskId# [8000000f:1:2:2:0] -> [8000000f:2:2:2:0] 2024-11-21T23:13:28.275658Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2024-11-21T23:13:28.275715Z 21 00h05m00.102048s :BS_NODE DEBUG: [21] VDiskId# [8000000f:1:1:1:0] -> [8000000f:2:1:1:0] 
2024-11-21T23:13:28.275809Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2024-11-21T23:13:28.275871Z 6 00h05m00.102048s :BS_NODE DEBUG: [6] VDiskId# [8000000f:1:0:0:0] -> [8000000f:2:0:0:0] 2024-11-21T23:13:28.275960Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:28.276008Z 24 00h05m00.102048s :BS_NODE DEBUG: [24] VDiskId# [8000000f:1:1:2:0] -> [8000000f:2:1:2:0] 2024-11-21T23:13:28.276099Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2024-11-21T23:13:28.276143Z 9 00h05m00.102048s :BS_NODE DEBUG: [9] VDiskId# [8000000f:1:0:1:0] -> [8000000f:2:0:1:0] 2024-11-21T23:13:28.276226Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.276267Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000000f:2:0:2:0] PDiskId# 1001 VSlotId# 1010 created 2024-11-21T23:13:28.276328Z 12 00h05m00.102048s :BS_NODE DEBUG: [12] VDiskId# [8000000f:2:0:2:0] status changed to INIT_PENDING 2024-11-21T23:13:28.276441Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T23:13:28.276489Z 30 00h05m00.102048s :BS_NODE DEBUG: [30] VDiskId# [8000000f:1:2:0:0] -> [8000000f:2:2:0:0] 2024-11-21T23:13:28.276567Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T23:13:28.276614Z 33 00h05m00.102048s :BS_NODE DEBUG: [33] VDiskId# [8000000f:1:2:1:0] -> [8000000f:2:2:1:0] 2024-11-21T23:13:28.282292Z 12 00h05m01.335048s :BS_NODE DEBUG: [12] VDiskId# [8000004f:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:28.283031Z 12 00h05m01.406048s :BS_NODE DEBUG: [12] VDiskId# [8000000f:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:28.283709Z 12 00h05m01.660048s :BS_NODE DEBUG: [12] VDiskId# [8000001f:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:28.284658Z 12 00h05m01.682048s :BS_NODE DEBUG: [12] VDiskId# [8000005f:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:28.285414Z 12 00h05m04.215048s :BS_NODE DEBUG: [12] VDiskId# [8000002f:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:28.286962Z 12 00h05m05.577048s :BS_NODE DEBUG: [12] VDiskId# [8000007f:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:28.287906Z 12 00h05m05.767048s :BS_NODE DEBUG: [12] VDiskId# [8000006f:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:28.288824Z 12 00h05m06.074048s :BS_NODE DEBUG: [12] VDiskId# [8000003f:2:0:2:0] status changed to REPLICATING 2024-11-21T23:13:28.289955Z 12 00h05m10.848048s :BS_NODE DEBUG: [12] VDiskId# [8000006f:2:0:2:0] status changed to READY 2024-11-21T23:13:28.291407Z 12 00h05m10.848560s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.291487Z 12 00h05m10.848560s :BS_NODE DEBUG: [12] VDiskId# [8000006f:1:0:2:0] destroyed 2024-11-21T23:13:28.291669Z 12 00h05m11.279048s :BS_NODE DEBUG: [12] VDiskId# [8000007f:2:0:2:0] status changed to READY 2024-11-21T23:13:28.293100Z 12 00h05m11.279560s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.293151Z 12 00h05m11.279560s :BS_NODE DEBUG: [12] VDiskId# [8000007f:1:0:2:0] destroyed 2024-11-21T23:13:28.294131Z 12 00h05m20.138048s :BS_NODE DEBUG: [12] VDiskId# [8000002f:2:0:2:0] status changed to READY 2024-11-21T23:13:28.295646Z 12 00h05m20.138560s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.295697Z 12 00h05m20.138560s :BS_NODE DEBUG: [12] VDiskId# [8000002f:1:0:2:0] destroyed 2024-11-21T23:13:28.295852Z 12 00h05m21.694048s :BS_NODE DEBUG: [12] VDiskId# [8000005f:2:0:2:0] status changed to READY 2024-11-21T23:13:28.297210Z 12 00h05m21.694560s :BS_NODE DEBUG: [12] 
NodeServiceSetUpdate 2024-11-21T23:13:28.297272Z 12 00h05m21.694560s :BS_NODE DEBUG: [12] VDiskId# [8000005f:1:0:2:0] destroyed 2024-11-21T23:13:28.297449Z 12 00h05m24.024048s :BS_NODE DEBUG: [12] VDiskId# [8000000f:2:0:2:0] status changed to READY 2024-11-21T23:13:28.298905Z 12 00h05m24.024560s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.298970Z 12 00h05m24.024560s :BS_NODE DEBUG: [12] VDiskId# [8000000f:1:0:2:0] destroyed 2024-11-21T23:13:28.300410Z 12 00h05m30.664048s :BS_NODE DEBUG: [12] VDiskId# [8000003f:2:0:2:0] status changed to READY 2024-11-21T23:13:28.301855Z 12 00h05m30.664560s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.301908Z 12 00h05m30.664560s :BS_NODE DEBUG: [12] VDiskId# [8000003f:1:0:2:0] destroyed 2024-11-21T23:13:28.302072Z 12 00h05m31.493048s :BS_NODE DEBUG: [12] VDiskId# [8000001f:2:0:2:0] status changed to READY 2024-11-21T23:13:28.303609Z 12 00h05m31.493560s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.303656Z 12 00h05m31.493560s :BS_NODE DEBUG: [12] VDiskId# [8000001f:1:0:2:0] destroyed 2024-11-21T23:13:28.304333Z 12 00h05m36.214048s :BS_NODE DEBUG: [12] VDiskId# [8000004f:2:0:2:0] status changed to READY 2024-11-21T23:13:28.305882Z 12 00h05m36.214560s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:28.305931Z 12 00h05m36.214560s :BS_NODE DEBUG: [12] VDiskId# [8000004f:1:0:2:0] destroyed >> TSubscriberTest::NotifyDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2024-11-21T23:13:29.195064Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:29.197767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T23:13:29.197866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T23:13:29.197907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T23:13:29.197996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T23:13:29.198076Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T23:13:29.198121Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.198205Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T23:13:29.198246Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.198465Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 1 2024-11-21T23:13:29.198592Z 
node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:33:2064], cookie# 1 2024-11-21T23:13:29.198645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2064], cookie# 1 2024-11-21T23:13:29.198685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 1 2024-11-21T23:13:29.198748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2024-11-21T23:13:29.198784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2024-11-21T23:13:29.198834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 1 2024-11-21T23:13:29.198869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T23:13:29.198909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T23:13:29.198960Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.198998Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:34:2064], cookie# 1 2024-11-21T23:13:29.199039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 1 2024-11-21T23:13:29.199074Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:35:2064], cookie# 1 2024-11-21T23:13:29.199101Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 1, partial# 0 2024-11-21T23:13:29.199221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 2 2024-11-21T23:13:29.199308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 2 2024-11-21T23:13:29.199330Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T23:13:29.199364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:34:2064], cookie# 2 2024-11-21T23:13:29.199402Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 2 2024-11-21T23:13:29.199454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 2024-11-21T23:13:29.199480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:34:2064], cookie# 2 2024-11-21T23:13:29.199504Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:32:2064][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T23:13:29.199545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T23:13:29.199597Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.199658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:35:2064], cookie# 2 2024-11-21T23:13:29.199693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Unexpected sync response: sender# [1:35:2064], cookie# 2 2024-11-21T23:13:29.199784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:31:2063], cookie# 3 2024-11-21T23:13:29.199863Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:33:2064], cookie# 3 2024-11-21T23:13:29.199885Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, faulires# 1 2024-11-21T23:13:29.199909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:34:2064], cookie# 3 2024-11-21T23:13:29.199930Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:32:2064][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2024-11-21T23:13:29.199979Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2064], cookie# 3 2024-11-21T23:13:29.200043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2064], cookie# 3 2024-11-21T23:13:29.200064Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Unexpected sync response: sender# [1:35:2064], cookie# 3 2024-11-21T23:13:29.200162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T23:13:29.200194Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2024-11-21T23:13:29.641073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:29.643086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T23:13:29.643191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2024-11-21T23:13:29.643260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2024-11-21T23:13:29.643327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:34:2065] 2024-11-21T23:13:29.643385Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2024-11-21T23:13:29.643436Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Set up state: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.643589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2065] 2024-11-21T23:13:29.643646Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.643932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2024-11-21T23:13:29.643996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2024-11-21T23:13:29.644057Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2024-11-21T23:13:29.644121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:34:2065] 2024-11-21T23:13:29.644188Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:33:2065][path] Path was updated to new version: owner# [1:31:2063], state# { Deleted: 0 Strong: 1 Version: (PathId: 
[OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.644235Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:35:2065] 2024-11-21T23:13:29.644287Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.644351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:33:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:36:2065] 2024-11-21T23:13:29.644393Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:33:2065][path] Path was already updated: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberTest::StrongNotificationAfterCommit >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> TSubscriberTest::ReconnectOnFailure >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T23:12:25.663395Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:25.663476Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:25.685052Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2024-11-21T23:12:25.708396Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T23:12:25.709508Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T23:12:25.712004Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T23:12:25.714163Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T23:12:25.716035Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:25.724285Z node 1 :PERSQUEUE INFO: new Cookie default|63a456c5-709551b2-dab7774a-bdcb16c_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:25.732620Z node 1 :PERSQUEUE INFO: new Cookie default|8f920e66-c9d509c5-f58576c4-a7147a85_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:25.754986Z node 1 :PERSQUEUE INFO: new Cookie default|33fdb664-43e041ca-d9fe28ef-f47d2fa1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:240:2057] recipient: [1:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:243:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:244:2057] recipient: [1:242:2244] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:245:2245] sender: [1:246:2057] recipient: [1:242:2244] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to 
TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:25.843534Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:25.843612Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:12:25.844332Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:296:2288] 2024-11-21T23:12:25.846568Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:297:2289] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T23:12:25.867187Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:296:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T23:12:25.878660Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:297:2289] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:245:2245] sender: [1:329:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T23:12:26.468115Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:26.468205Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:26.495577Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:26.496374Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 
2024-11-21T23:12:26.496990Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:184:2197] 2024-11-21T23:12:26.498889Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:184:2197] 2024-11-21T23:12:26.500524Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:185:2198] 2024-11-21T23:12:26.502374Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:26.511980Z node 2 :PERSQUEUE INFO: new Cookie default|6853af93-c04b9876-a1d04fe3-c4881879_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:26.524070Z node 2 :PERSQUEUE INFO: new Cookie default|92af48c-7599c0e5-cff47974-49348f89_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:26.571118Z node 2 :PERSQUEUE INFO: new Cookie default|9c89c3e6-30f1878b-3eeeae76-5f6007b8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvOffsets ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:241:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:244:2057] recipient: [2:243:2245] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:245:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:246:2246] sender: [2:247:2057] recipient: [2:243:2245] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:26.630962Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:26.631028Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:12:26.632010Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:297:2289] 2024-11-21T23:12:26.633895Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:298:2290] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartit ... 
[54:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:177:2057] recipient: [54:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:29.247479Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:13:29.248501Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 54 actor [54:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } 2024-11-21T23:13:29.249118Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:184:2197] 2024-11-21T23:13:29.251452Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:184:2197] 2024-11-21T23:13:29.253118Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:185:2198] 2024-11-21T23:13:29.255230Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:29.264290Z node 54 :PERSQUEUE INFO: new Cookie default|d0da480d-d9163202-adce6eba-fad7a456_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:29.275677Z node 54 :PERSQUEUE INFO: new Cookie default|975411da-48ab1a31-9ab8b442-646c7204_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:29.302439Z node 54 :PERSQUEUE INFO: new Cookie default|7b5bc944-e30ed9d6-96818981-c8cf6a31_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:240:2057] recipient: [54:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is 
[54:105:2137] sender: [54:243:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:105:2137] sender: [54:244:2057] recipient: [54:242:2244] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:245:2245] sender: [54:246:2057] recipient: [54:242:2244] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:29.371708Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:13:29.371782Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:13:29.372803Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:296:2288] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T23:13:29.377048Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:297:2289] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T23:13:29.405130Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:297:2289] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T23:13:29.410327Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:296:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [54:245:2245] sender: [54:327:2057] recipient: [54:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:101:2057] recipient: [55:99:2133] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:106:2057] recipient: [55:99:2133] 2024-11-21T23:13:30.046160Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:13:30.046248Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:147:2057] recipient: [55:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:147:2057] recipient: [55:145:2168] Leader for TabletID 72057594037927938 is [55:151:2172] sender: [55:152:2057] recipient: [55:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:177:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:30.085811Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:13:30.086743Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2024-11-21T23:13:30.087454Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:184:2197] 2024-11-21T23:13:30.090632Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T23:13:30.092248Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:185:2198] 2024-11-21T23:13:30.093996Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:30.102149Z node 55 :PERSQUEUE INFO: new Cookie default|7fe58c68-92a842e5-47e467e5-b91bf392_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:30.109362Z node 55 :PERSQUEUE INFO: new Cookie default|c4d0e04-f0c99c06-b625e08-2d59fdcd_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:30.135825Z node 55 :PERSQUEUE INFO: new Cookie default|b208f95c-e74db183-4c5b355-b3e563c4_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:240:2057] recipient: [55:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:243:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [55:105:2137] sender: [55:244:2057] recipient: [55:242:2244] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:245:2245] sender: [55:246:2057] recipient: [55:242:2244] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:13:30.232536Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing 
is not enabled in BillingMeteringConfig 2024-11-21T23:13:30.232617Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2024-11-21T23:13:30.233857Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:296:2288] 2024-11-21T23:13:30.236816Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:297:2289] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase 2024-11-21T23:13:30.258877Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:296:2288] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T23:13:30.296120Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:297:2289] Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [55:245:2245] sender: [55:329:2057] recipient: [55:14:2061] Captured TEvents::TSystem::Wakeup to PERSQUEUE_ANS_ACTOR >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2024-11-21T23:13:30.800820Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:30.804321Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T23:13:30.804503Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T23:13:30.804635Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T23:13:30.804715Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T23:13:30.804759Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:32:2064][path] Set up state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.804869Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T23:13:30.805016Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T23:13:30.805068Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
2024-11-21T23:13:30.805533Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T23:13:30.805602Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.805647Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T23:13:30.805676Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.805708Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T23:13:30.805736Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.817044Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:43:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T23:13:30.817242Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:33:2064] 2024-11-21T23:13:30.817312Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.817467Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:44:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T23:13:30.817507Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T23:13:30.817567Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:34:2064] 2024-11-21T23:13:30.817613Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.817710Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2064] 2024-11-21T23:13:30.817742Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:32:2064][path] Ignore empty state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
2024-11-21T23:13:30.818255Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:43:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2024-11-21T23:13:30.818330Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:33:2064] 2024-11-21T23:13:30.818462Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:32:2064][path] Update to strong state: owner# [2:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2024-11-21T23:13:30.813869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:30.816069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T23:13:30.816178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2024-11-21T23:13:30.816234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2024-11-21T23:13:30.816324Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T23:13:30.816418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T23:13:30.816456Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Set up state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.816532Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2064] 2024-11-21T23:13:30.816585Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.817022Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:36:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2024-11-21T23:13:30.817110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:33:2064] 2024-11-21T23:13:30.817166Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:32:2064][path] Update to strong state: owner# [1:31:2063], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:30.817317Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:37:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path 
Version: 0 }: sender# [1:6:2053] 2024-11-21T23:13:30.817372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:32:2064][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:34:2064] 2024-11-21T23:13:30.817408Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:32:2064][path] Ignore empty state: owner# [1:31:2063], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> TSchemeShardTest::CreateTable >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::CreateAlterTableWithCodec >> TSchemeShardTest::RmDirTwice >> TSchemeShardTest::AlterTableKeyColumns >> TSchemeShardTest::MkRmDir >> TSchemeShardTest::Boot >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardTest::InitRootAgain >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit [GOOD] Test command err: 2024-11-21T23:10:28.927884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873897891798604:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:28.928048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c1b/r3tmp/tmphjZ1DS/pdisk_1.dat 2024-11-21T23:10:29.155034Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:10:29.400132Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:29.403461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:29.403538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:29.410608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6711, node 1 2024-11-21T23:10:29.635026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002c1b/r3tmp/yandexwiiIG8.tmp 2024-11-21T23:10:29.635056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002c1b/r3tmp/yandexwiiIG8.tmp 2024-11-21T23:10:29.637300Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002c1b/r3tmp/yandexwiiIG8.tmp 2024-11-21T23:10:29.637449Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:29.699971Z INFO: TTestServer started on Port 27722 GrpcPort 6711 TClient is connected to server localhost:27722 PQClient connected to localhost:6711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:30.088414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:30.107414Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:30.117337Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:30.122222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:10:30.341874Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:30.353068Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:10:32.427522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873915071668358:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:32.427660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:32.428035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873915071668379:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:32.432811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:32.458442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873915071668381:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:32.780224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.827065Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873915071668473:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:32.828906Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njk0NmUyMTMtZGJhNTY5M2MtNmQ1MmI5MDQtY2YxNDQ2ZmM=, ActorId: [1:7439873915071668355:2305], ActorState: ExecuteState, TraceId: 01jd8fvmz88szgtygevb2d4j6f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:32.831754Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:32.862559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:33.015410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873919366636029:2624] 2024-11-21T23:10:33.928228Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873897891798604:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:33.928304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:39.428478Z :WriteToTopic_Demo_2 INFO: TTopicSdkTestSetup started 2024-11-21T23:10:39.458770Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:39.501848Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873945136440130:2810] connected; active server actors: 1 2024-11-21T23:10:39.502088Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T23:10:39.503022Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:39.503153Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:39.505447Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:39.516277Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:39.517335Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:39.517507Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:39.517533Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:39.517549Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:39.517567Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:39.517590Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:39.517611Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:39.517631Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:39.518780Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:39.518814Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:39.518858Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873945136440151:2438], now have 1 active actors on pipe 2024-11-21T23:10:39.518907Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:39.525533Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:39.525566Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873945136440129:2809], now have 1 active actors on pipe 2024-11-21T23:10:39.558906Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439873902186766088 RawX2: 4294969484 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChanne ... 28.668049Z :DEBUG: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] [] Returning serverBytesSize = 0 to budget 0 11 2024-11-21T23:13:28.668228Z :DEBUG: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] [] Commit offsets [1, 12). 
Partition stream id: 1 2024-11-21T23:13:28.668393Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 grpc read done: success# 1, data# { read_request { bytes_size: 15001457 } } 2024-11-21T23:13:28.668581Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 got read request: guid# 91f50198-fcc75fa4-94ad5238-f99ef4df 2024-11-21T23:13:28.668710Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 1 } } } } 2024-11-21T23:13:28.668928Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 1 end: 12 } } } } 2024-11-21T23:13:28.668945Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 1 prev 0 end 12 by cookie 2 2024-11-21T23:13:28.669083Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 12 prev 0 end 12 by cookie 3 2024-11-21T23:13:28.669148Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T23:13:28.669179Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T23:13:28.669231Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T23:13:28.669250Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T23:13:28.669315Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 1 (startOffset 0) session test-consumer_10_1_3755449797461086615_v1 2024-11-21T23:13:28.669499Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:13:28.670641Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:13:28.670722Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 12 (startOffset 0) session test-consumer_10_1_3755449797461086615_v1 2024-11-21T23:13:28.670875Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2024-11-21T23:13:28.670908Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T23:13:28.671018Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2024-11-21T23:13:28.671058Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 1 endOffset 12 with cookie 2 2024-11-21T23:13:28.671917Z :DEBUG: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 1 } } 2024-11-21T23:13:28.671399Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 1 2024-11-21T23:13:28.671750Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 12 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:13:28.671784Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:13:28.671834Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 3 2024-11-21T23:13:28.671895Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2024-11-21T23:13:28.671920Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 12 endOffset 12 with cookie 3 2024-11-21T23:13:28.671945Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 12 2024-11-21T23:13:28.672549Z :DEBUG: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 12 } } 2024-11-21T23:13:29.342584Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2024-11-21T23:13:29.342656Z :INFO: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1003 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:13:29.354665Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 checking auth because of timeout 2024-11-21T23:13:29.354753Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 auth for : test-consumer 2024-11-21T23:13:29.355714Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 Handle describe topics response 2024-11-21T23:13:29.355838Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session 
test-consumer_10_1_3755449797461086615_v1 auth is DEAD 2024-11-21T23:13:29.355922Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 auth ok: topics# 1, initDone# 1 2024-11-21T23:13:30.194294Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:13:30.349988Z :INFO: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] Closing read session. Close timeout: 0.000000s 2024-11-21T23:13:30.350049Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2024-11-21T23:13:30.350096Z :INFO: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2011 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:13:30.350217Z :NOTICE: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:13:30.350267Z :DEBUG: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] [] Abort session to cluster 2024-11-21T23:13:30.352337Z :NOTICE: [/Root] [/Root] [f3fa9006-896416e9-716a7825-d11807ba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:13:30.354185Z :INFO: [/Root] SessionId [test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2024-11-21T23:13:30.354220Z :INFO: [/Root] SessionId [test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0] PartitionId [0] Generation [2] Write session will now close 2024-11-21T23:13:30.354268Z :DEBUG: [/Root] SessionId [test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0] PartitionId [0] Generation [2] Write session: aborting 2024-11-21T23:13:30.356078Z :INFO: [/Root] SessionId [test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2024-11-21T23:13:30.356148Z :DEBUG: [/Root] SessionId [test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0] PartitionId [0] Generation [2] Write session: destroy 2024-11-21T23:13:30.371285Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 grpc read done: success# 0, data# { } 2024-11-21T23:13:30.371293Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0 grpc read done: success: 0 data: 2024-11-21T23:13:30.371314Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0 grpc read failed 2024-11-21T23:13:30.371324Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 grpc read failed 2024-11-21T23:13:30.371360Z node 10 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0 2024-11-21T23:13:30.371362Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 closed 2024-11-21T23:13:30.371378Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|fe206db9-4e01f59d-3182ebf3-38b43a16_0 is DEAD 2024-11-21T23:13:30.371411Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_3755449797461086615_v1 is DEAD 2024-11-21T23:13:30.371759Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:13:30.371867Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439874671164780755:2530] disconnected; active server actors: 1 2024-11-21T23:13:30.371901Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:30.371910Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439874671164780755:2530] client test-consumer disconnected session test-consumer_10_1_3755449797461086615_v1 2024-11-21T23:13:30.371926Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_3755449797461086615_v1 2024-11-21T23:13:30.371956Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439874671164780758:2533] destroyed 2024-11-21T23:13:30.371975Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:30.371994Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439874662574846076:2509] destroyed 2024-11-21T23:13:30.372014Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server 
session deregistered: test-consumer_10_1_3755449797461086615_v1 2024-11-21T23:13:30.372032Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::DependentOps >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> KqpSinkMvcc::OlapNamedStatement [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2024-11-21T23:13:24.264730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:13:24.268439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:13:24.268650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000ff0/r3tmp/tmp4psRrM/pdisk_1.dat 2024-11-21T23:13:24.681029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:13:24.693417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:13:24.734274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:13:24.735353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:13:24.738093Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2024-11-21T23:13:24.738157Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 1 Status# 16 SEND to# [1:380:2375] Proxy marker# C1 2024-11-21T23:13:24.760261Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:24.763243Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2024-11-21T23:13:24.814670Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:307:2347] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2024-11-21T23:13:24.814845Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2024-11-21T23:13:24.815001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:24.815056Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T23:13:24.815098Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:13:24.815144Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T23:13:24.815184Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:13:24.815291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T23:13:24.815564Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T23:13:24.815654Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T23:13:24.815697Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T23:13:24.815902Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T23:13:24.816067Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2024-11-21T23:13:24.826866Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2024-11-21T23:13:24.826975Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:307:2347]) 2024-11-21T23:13:24.827100Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T23:13:24.827761Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2024-11-21T23:13:24.827861Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Execute 2024-11-21T23:13:24.827921Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:13:24.828018Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:307:2347])::Complete 2024-11-21T23:13:24.828230Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443335680 } 2024-11-21T23:13:24.828302Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2024-11-21T23:13:24.828352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:24.828531Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2024-11-21T23:13:24.828609Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2024-11-21T23:13:24.828647Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2024-11-21T23:13:24.828790Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2024-11-21T23:13:24.828839Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2024-11-21T23:13:24.828871Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2024-11-21T23:13:24.828909Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2024-11-21T23:13:24.840051Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2024-11-21T23:13:24.840144Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2024-11-21T23:13:24.949427Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2024-11-21T23:13:24.949622Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T23:13:24.950001Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2024-11-21T23:13:24.950424Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2024-11-21T23:13:24.950511Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:380:2375] Proxy 2024-11-21T23:13:24.951886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T23:13:24.953317Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T23:13:24.953444Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T23:13:24.953487Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2024-11-21T23:13:24.953531Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2024-11-21T23:13:24.954456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:24.954556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2024-11-21T23:13:24.955602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2024-11-21T23:13:24.958855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:13:24.960202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:13:24.960275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:13:24.961119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2024-11-21T23:13:24.964914Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2024-11-21T23:13:24.973628Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2024-11-21T23:13:24.973750Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2024-11-21T23:13:24.974026Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2024-11-21T23:13:24.974115Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2024-11-21T23:13:24.974181Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2024-11-21T23:13:24.974412Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2024-11-21T23:13:24.975137Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2024-11-21T23:13:24.975361Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2024-11-21T23:13:24.976073Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2024-11-21T23:13:24.976437Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2024-11-21T23:13:24.976567Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004596608}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2024-11-21T23:13:24.976647Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004596608}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2024-11-21T23:13:24.976870Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004596608}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2024-11-21T23:13:24.976948Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923004596608}: tablet 72075186224037888 channel 2 assigned to group 2181038080 ... 5186224037888 OK) 2024-11-21T23:13:32.798767Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T23:13:32.798870Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2024-11-21T23:13:32.799193Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2024-11-21T23:13:32.799251Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2024-11-21T23:13:32.799547Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2024-11-21T23:13:32.810826Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T23:13:32.812349Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2024-11-21T23:13:32.812422Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 step# 3001 Status# 16 SEND to# [2:379:2374] Proxy marker# C1 2024-11-21T23:13:32.827083Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2024-11-21T23:13:32.947948Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715666 has been planned 2024-11-21T23:13:32.948093Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2024-11-21T23:13:32.948148Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2024-11-21T23:13:32.948410Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2024-11-21T23:13:32.948902Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715666 marker# C2 2024-11-21T23:13:32.948986Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 stepId# 3500 Status# 17 SEND EvProposeTransactionStatus to# [2:379:2374] Proxy 2024-11-21T23:13:32.949517Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:13:32.950207Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715666 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 524 RawX2: 8589937049 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T23:13:32.950277Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:13:32.950536Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:13:32.950588Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:13:32.950642Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T23:13:32.950887Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T23:13:32.951025Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:13:32.951277Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:13:32.951370Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2024-11-21T23:13:32.951883Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:13:32.953405Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T23:13:32.953488Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T23:13:32.953582Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:13:32.956384Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:13:32.956473Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:379:2374], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:13:32.956542Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2024-11-21T23:13:32.956643Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:13:32.956749Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T23:13:32.956854Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2024-11-21T23:13:32.956903Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2024-11-21T23:13:32.956933Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2024-11-21T23:13:32.956982Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 acknowledged 2024-11-21T23:13:32.958034Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2024-11-21T23:13:32.961234Z node 2 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2024-11-21T23:13:32.961302Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T23:13:32.961870Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2024-11-21T23:13:32.961972Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 1 2024-11-21T23:13:32.962533Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2024-11-21T23:13:32.962989Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T23:13:32.964072Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 INFO ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2024-11-21T23:13:32.964224Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 INFO ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2024-11-21T23:13:32.964349Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 INFO ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:59: Begin, root #43 2024-11-21T23:13:32.964436Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 INFO ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2024-11-21T23:13:32.964511Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 TRACE ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:387: {0}, callable #43 2024-11-21T23:13:32.964608Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 INFO ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:577: Node #43 finished execution 2024-11-21T23:13:32.964706Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 INFO ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2024-11-21T23:13:32.964790Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 INFO ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2024-11-21T23:13:32.964862Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 INFO ydb-core-tx-datashard-ut_minstep(pid=227630, 
tid=0x00007F08413EEB80) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2024-11-21T23:13:32.965038Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.964 NOTE ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2024-11-21T23:13:32.965163Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.965 NOTE ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2024-11-21T23:13:32.965260Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=Y2Q5NWU3NjQtZDNiMWY2ZGYtNGM4MmE0Y2EtMmY4YTRkYzY= 2024-11-21 23:13:32.965 NOTE ydb-core-tx-datashard-ut_minstep(pid=227630, tid=0x00007F08413EEB80) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2024-11-21T23:13:32.981229Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2024-11-21T23:13:32.981508Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2024-11-21T23:13:32.983501Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2024-11-21T23:13:32.984530Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2024-11-21T23:13:32.985062Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2024-11-21T23:13:32.985130Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2024-11-21T23:13:32.985256Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2024-11-21T23:13:32.985392Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2024-11-21T23:13:32.985539Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableFollowers >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [GOOD] >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> 
KqpSinkMvcc::OlapNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 14265, MsgBus: 8733 2024-11-21T23:12:37.801900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874451917800686:2132];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001170/r3tmp/tmp9vwnIX/pdisk_1.dat 2024-11-21T23:12:38.171369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:38.547374Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:38.553159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:38.553235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:38.559749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14265, node 1 2024-11-21T23:12:38.799897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:38.799919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:38.799926Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:38.800012Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8733 TClient is connected to server localhost:8733 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:39.630179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:12:39.663481Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:12:42.786487Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874451917800686:2132];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:42.786568Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:42.826282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874473392637716:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.826406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.828739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874473392637728:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.833238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:12:42.849202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874473392637730:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:12:43.302277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:12:43.464433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:43.464668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:43.464971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:43.465089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:43.465191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:43.465301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:43.465405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:43.465514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:43.465644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:43.465774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:43.465879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:43.465987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7439874477687605270:2319];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:43.469150Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:43.469202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:12:43.469448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:12:43.469578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:12:43.469676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:12:43.469774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:12:43.469886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:12:43.469994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:12:43.470104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:12:43.470233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:12:43.470338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:12:43.470464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7439874477687605295:2326];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:12:43.519503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7439874477687605271:2320];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:12:43.519561Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7439874477687605271:2320];tablet_id=72075186224037890;process= ... ;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.479304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.479411Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.479517Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.479655Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037945;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.479754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037957;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.479852Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037987;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.479951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.480058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037995;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.480152Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.480250Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037933;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.480747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037956;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.480861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.480960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037943;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037949;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037968;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037953;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481446Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037989;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481560Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037965;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481672Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037971;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037991;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037955;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.481979Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.482073Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.482209Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.482569Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.482761Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.482963Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.483096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037911;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.483207Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.483321Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.483438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.483559Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.483674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037963;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.483887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.484100Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.484239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037939;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.484355Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.484457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.484557Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037947;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.484654Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.487242Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037915;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.487384Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.487594Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037972;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.487823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.488114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037961;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.488203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037975;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2024-11-21T23:13:28.488955Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037937;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.489075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.489183Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.489378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.489481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;tx_state=complete;fline=columnshard_impl.cpp:735;event=skip_indexation;reason=disabled; 2024-11-21T23:13:28.915360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7439874603955372625:2398];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T23:13:28.937728Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;task_id=37d93ed6-a85e11ef-9805709d-fb962126;fline=with_appended.cpp:80;portions=3,;task_id=37d93ed6-a85e11ef-9805709d-fb962126; 2024-11-21T23:13:28.938854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037970;task_id=37d984c2-a85e11ef-9626cb15-692ed9ac;fline=with_appended.cpp:80;portions=;task_id=37d984c2-a85e11ef-9626cb15-692ed9ac; 2024-11-21T23:13:28.941002Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[2:7439874603955372615:2390];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T23:13:28.943246Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7439874608250340017:2410];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T23:13:28.945401Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037913;self_id=[2:7439874603955372484:2365];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T23:13:28.962555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7439874603955372621:2395];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T23:13:28.962765Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7439874603955372628:2400];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:66;event=granule_locked;path_id=7; 2024-11-21T23:13:28.979151Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037913;task_id=37ddd5a4-a85e11ef-ae083de9-1c66040;fline=with_appended.cpp:80;portions=;task_id=37ddd5a4-a85e11ef-ae083de9-1c66040; 2024-11-21T23:13:28.979589Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;task_id=37dcbdcc-a85e11ef-899134fe-bdbc9995;fline=with_appended.cpp:80;portions=;task_id=37dcbdcc-a85e11ef-899134fe-bdbc9995; 2024-11-21T23:13:28.979780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;task_id=37dd25fa-a85e11ef-a5f54db7-eb1ed70c;fline=with_appended.cpp:80;portions=;task_id=37dd25fa-a85e11ef-a5f54db7-eb1ed70c; 2024-11-21T23:13:28.980120Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;task_id=37e07430-a85e11ef-a8cb23d8-adcaa532;fline=with_appended.cpp:80;portions=;task_id=37e07430-a85e11ef-a8cb23d8-adcaa532; 2024-11-21T23:13:28.980675Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=37e07fac-a85e11ef-b8ac7311-9584f589;fline=with_appended.cpp:80;portions=;task_id=37e07fac-a85e11ef-b8ac7311-9584f589; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTable |84.2%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> TSchemeShardTest::Restart [GOOD] |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> TSchemeShardTest::ModifyACL [GOOD] |84.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::NameFormat >> TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::ConsistentCopyTableRejects >> TSchemeShardTest::CopyTableWithAlterConfig >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::ParallelCreateTable >> TSchemeShardTest::CreateIndexedTableRejects >> TSchemeShardTest::SchemeErrors >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] >> TSchemeShardTest::DropTable >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> BsControllerTest::SelfHealMirror3dc [GOOD] >> TSchemeShardTest::ParallelCreateTable [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ParallelCreateSameTable >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableById >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> TSchemeShardTest::DropTableById >> TSchemeShardTest::CopyTableOmitFollowers >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |84.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |84.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |84.2%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableConfig >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TSchemeShardTest::CopyIndexedTable >> TSchemeShardTest::CopyTableForBackup >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> TSchemeShardTest::MultipleColumnFamilies |84.2%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] |84.2%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> TSchemeShardTest::RejectAlterSolomon [GOOD] |84.2%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> TSchemeShardTest::SimultaneousDropForceDrop >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter |84.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2024-11-21T23:13:26.349182Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2024-11-21T23:13:26.349239Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2024-11-21T23:13:26.349307Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2024-11-21T23:13:26.349329Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2024-11-21T23:13:26.349373Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2024-11-21T23:13:26.349407Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2024-11-21T23:13:26.349463Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2024-11-21T23:13:26.349485Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2024-11-21T23:13:26.349529Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2024-11-21T23:13:26.349552Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2024-11-21T23:13:26.349585Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2024-11-21T23:13:26.349621Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2024-11-21T23:13:26.349664Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2024-11-21T23:13:26.349682Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2024-11-21T23:13:26.349715Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2024-11-21T23:13:26.349737Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2024-11-21T23:13:26.349787Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2024-11-21T23:13:26.349807Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2024-11-21T23:13:26.349845Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2024-11-21T23:13:26.349866Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2024-11-21T23:13:26.349916Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2024-11-21T23:13:26.349937Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2024-11-21T23:13:26.349975Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2024-11-21T23:13:26.349997Z 12 
00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2024-11-21T23:13:26.350053Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2024-11-21T23:13:26.350079Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2024-11-21T23:13:26.350122Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2024-11-21T23:13:26.350141Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2024-11-21T23:13:26.350173Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2024-11-21T23:13:26.350192Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2024-11-21T23:13:26.350228Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2024-11-21T23:13:26.350246Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2024-11-21T23:13:26.350297Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2024-11-21T23:13:26.350319Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2024-11-21T23:13:26.350363Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2024-11-21T23:13:26.350383Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2024-11-21T23:13:26.350437Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2024-11-21T23:13:26.350457Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2024-11-21T23:13:26.350497Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2024-11-21T23:13:26.350517Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2024-11-21T23:13:26.350549Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2024-11-21T23:13:26.350574Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2024-11-21T23:13:26.350657Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2024-11-21T23:13:26.350680Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2024-11-21T23:13:26.350736Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2024-11-21T23:13:26.350764Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2024-11-21T23:13:26.350804Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2024-11-21T23:13:26.350825Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2024-11-21T23:13:26.350876Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2024-11-21T23:13:26.350902Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2024-11-21T23:13:26.350938Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2024-11-21T23:13:26.350957Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2024-11-21T23:13:26.351009Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2024-11-21T23:13:26.351033Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2024-11-21T23:13:26.351087Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2024-11-21T23:13:26.351108Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2024-11-21T23:13:26.351145Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2024-11-21T23:13:26.351165Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2024-11-21T23:13:26.351206Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2024-11-21T23:13:26.351233Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2024-11-21T23:13:26.351278Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2024-11-21T23:13:26.351298Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2024-11-21T23:13:26.351334Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2024-11-21T23:13:26.351353Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2024-11-21T23:13:26.351392Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2024-11-21T23:13:26.351412Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2024-11-21T23:13:26.351448Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2024-11-21T23:13:26.351468Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2024-11-21T23:13:26.351502Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] 
Bootstrap 2024-11-21T23:13:26.351522Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2024-11-21T23:13:26.351561Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2024-11-21T23:13:26.351598Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2024-11-21T23:13:26.367674Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2024-11-21T23:13:26.369118Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2024-11-21T23:13:26.369177Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2024-11-21T23:13:26.369252Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2024-11-21T23:13:26.369297Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2024-11-21T23:13:26.369334Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2024-11-21T23:13:26.369369Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2024-11-21T23:13:26.369426Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2024-11-21T23:13:26.369462Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2024-11-21T23:13:26.369505Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2024-11-21T23:13:26.369544Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2024-11-21T23:13:26.369584Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2024-11-21T23:13:26.369621Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2024-11-21T23:13:26.369657Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2024-11-21T23:13:26.369736Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2024-11-21T23:13:26.369776Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2024-11-21T23:13:26.369816Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2024-11-21T23:13:26.369886Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 
2024-11-21T23:13:26.369932Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2024-11-21T23:13:26.369968Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2024-11-21T23:13:26.370007Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2024-11-21T23:13:26.370048Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2024-11-21T23:13:26.370083Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2024-11-21T23:13:26.370120Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2024-11-21T23:13:26.370165Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2024-11-21T23:13:26.370217Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2024-11-21T23:13:26.370260Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2024-11-21T23:13:26.370314Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2024-11-21T23:13:26.370362Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2024-11-21T23:13:26.370416Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2024-11-21T23:13:26.370453Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2024-11-21T23:13:26.370491Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2024-11-21T23:13:26.370529Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2024-11-21T23:13:26.370593Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2024-11-21T23:13:26.370629Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 
23:13:39.819294Z 20 05h45m00.118944s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T23:13:39.819347Z 20 05h45m00.118944s :BS_NODE DEBUG: [20] VDiskId# [80000038:4:1:2:0] -> [80000038:5:1:2:0] 2024-11-21T23:13:39.819417Z 23 05h45m00.118944s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.819524Z 24 05h45m00.118944s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:39.819590Z 24 05h45m00.118944s :BS_NODE DEBUG: [24] VDiskId# [80000038:5:1:0:0] PDiskId# 1002 VSlotId# 1014 created 2024-11-21T23:13:39.819665Z 24 05h45m00.118944s :BS_NODE DEBUG: [24] VDiskId# [80000038:5:1:0:0] status changed to INIT_PENDING 2024-11-21T23:13:39.819772Z 8 05h45m00.118944s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T23:13:39.819827Z 8 05h45m00.118944s :BS_NODE DEBUG: [8] VDiskId# [80000038:4:0:2:0] -> [80000038:5:0:2:0] 2024-11-21T23:13:39.819927Z 26 05h45m00.118944s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2024-11-21T23:13:39.819979Z 26 05h45m00.118944s :BS_NODE DEBUG: [26] VDiskId# [80000038:4:2:0:0] -> [80000038:5:2:0:0] 2024-11-21T23:13:39.820079Z 12 05h45m00.118944s :BS_NODE DEBUG: [12] NodeServiceSetUpdate 2024-11-21T23:13:39.820129Z 12 05h45m00.118944s :BS_NODE DEBUG: [12] VDiskId# [80000038:4:0:1:0] -> [80000038:5:0:1:0] 2024-11-21T23:13:39.820224Z 30 05h45m00.118944s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2024-11-21T23:13:39.820279Z 30 05h45m00.118944s :BS_NODE DEBUG: [30] VDiskId# [80000038:4:2:1:0] -> [80000038:5:2:1:0] 2024-11-21T23:13:39.820382Z 32 05h45m00.118944s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2024-11-21T23:13:39.820437Z 32 05h45m00.118944s :BS_NODE DEBUG: [32] VDiskId# [80000038:4:2:2:0] -> [80000038:5:2:2:0] 2024-11-21T23:13:39.820603Z 17 05h45m00.118944s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2024-11-21T23:13:39.820656Z 17 05h45m00.118944s :BS_NODE DEBUG: [17] VDiskId# [80000027:2:1:0:0] -> [80000027:3:1:0:0] 2024-11-21T23:13:39.820749Z 35 05h45m00.118944s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2024-11-21T23:13:39.820802Z 35 05h45m00.118944s :BS_NODE DEBUG: [35] VDiskId# [80000027:2:2:2:0] -> [80000027:3:2:2:0] 2024-11-21T23:13:39.820909Z 20 05h45m00.118944s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T23:13:39.820956Z 20 05h45m00.118944s :BS_NODE DEBUG: [20] VDiskId# [80000027:2:1:1:0] -> [80000027:3:1:1:0] 2024-11-21T23:13:39.821047Z 5 05h45m00.118944s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T23:13:39.821099Z 5 05h45m00.118944s :BS_NODE DEBUG: [5] VDiskId# [80000027:2:0:0:0] -> [80000027:3:0:0:0] 2024-11-21T23:13:39.821166Z 23 05h45m00.118944s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.821254Z 8 05h45m00.118944s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T23:13:39.821307Z 8 05h45m00.118944s :BS_NODE DEBUG: [8] VDiskId# [80000027:2:0:1:0] -> [80000027:3:0:1:0] 2024-11-21T23:13:39.821397Z 27 05h45m00.118944s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2024-11-21T23:13:39.821451Z 27 05h45m00.118944s :BS_NODE DEBUG: [27] VDiskId# [80000027:2:2:1:0] -> [80000027:3:2:1:0] 2024-11-21T23:13:39.821545Z 11 05h45m00.118944s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T23:13:39.821600Z 11 05h45m00.118944s :BS_NODE DEBUG: [11] VDiskId# [80000027:2:0:2:0] -> [80000027:3:0:2:0] 2024-11-21T23:13:39.821696Z 29 05h45m00.118944s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2024-11-21T23:13:39.821746Z 29 05h45m00.118944s :BS_NODE DEBUG: [29] VDiskId# [80000027:2:2:0:0] -> [80000027:3:2:0:0] 2024-11-21T23:13:39.821850Z 14 05h45m00.118944s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T23:13:39.821899Z 14 
05h45m00.118944s :BS_NODE DEBUG: [14] VDiskId# [80000027:3:1:2:0] PDiskId# 1002 VSlotId# 1015 created 2024-11-21T23:13:39.821971Z 14 05h45m00.118944s :BS_NODE DEBUG: [14] VDiskId# [80000027:3:1:2:0] status changed to INIT_PENDING 2024-11-21T23:13:39.822127Z 17 05h45m00.118944s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2024-11-21T23:13:39.822179Z 17 05h45m00.118944s :BS_NODE DEBUG: [17] VDiskId# [80000017:2:1:0:0] -> [80000017:3:1:0:0] 2024-11-21T23:13:39.822270Z 35 05h45m00.118944s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2024-11-21T23:13:39.822323Z 35 05h45m00.118944s :BS_NODE DEBUG: [35] VDiskId# [80000017:2:2:2:0] -> [80000017:3:2:2:0] 2024-11-21T23:13:39.824547Z 20 05h45m00.118944s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T23:13:39.824637Z 20 05h45m00.118944s :BS_NODE DEBUG: [20] VDiskId# [80000017:2:1:1:0] -> [80000017:3:1:1:0] 2024-11-21T23:13:39.824762Z 5 05h45m00.118944s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T23:13:39.824812Z 5 05h45m00.118944s :BS_NODE DEBUG: [5] VDiskId# [80000017:2:0:0:0] -> [80000017:3:0:0:0] 2024-11-21T23:13:39.824885Z 23 05h45m00.118944s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.824974Z 24 05h45m00.118944s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2024-11-21T23:13:39.825019Z 24 05h45m00.118944s :BS_NODE DEBUG: [24] VDiskId# [80000017:3:1:2:0] PDiskId# 1002 VSlotId# 1015 created 2024-11-21T23:13:39.825109Z 24 05h45m00.118944s :BS_NODE DEBUG: [24] VDiskId# [80000017:3:1:2:0] status changed to INIT_PENDING 2024-11-21T23:13:39.825217Z 8 05h45m00.118944s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T23:13:39.825272Z 8 05h45m00.118944s :BS_NODE DEBUG: [8] VDiskId# [80000017:2:0:1:0] -> [80000017:3:0:1:0] 2024-11-21T23:13:39.825360Z 11 05h45m00.118944s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T23:13:39.825410Z 11 05h45m00.118944s :BS_NODE DEBUG: [11] VDiskId# [80000017:2:0:2:0] -> [80000017:3:0:2:0] 2024-11-21T23:13:39.825500Z 29 05h45m00.118944s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2024-11-21T23:13:39.825552Z 29 05h45m00.118944s :BS_NODE DEBUG: [29] VDiskId# [80000017:2:2:0:0] -> [80000017:3:2:0:0] 2024-11-21T23:13:39.825651Z 33 05h45m00.118944s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2024-11-21T23:13:39.825707Z 33 05h45m00.118944s :BS_NODE DEBUG: [33] VDiskId# [80000017:2:2:1:0] -> [80000017:3:2:1:0] 2024-11-21T23:13:39.825886Z 17 05h45m00.118944s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2024-11-21T23:13:39.825944Z 17 05h45m00.118944s :BS_NODE DEBUG: [17] VDiskId# [80000007:2:1:0:0] -> [80000007:3:1:0:0] 2024-11-21T23:13:39.826049Z 35 05h45m00.118944s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2024-11-21T23:13:39.826100Z 35 05h45m00.118944s :BS_NODE DEBUG: [35] VDiskId# [80000007:2:2:2:0] -> [80000007:3:2:2:0] 2024-11-21T23:13:39.826186Z 20 05h45m00.118944s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2024-11-21T23:13:39.826239Z 20 05h45m00.118944s :BS_NODE DEBUG: [20] VDiskId# [80000007:2:1:1:0] -> [80000007:3:1:1:0] 2024-11-21T23:13:39.826341Z 5 05h45m00.118944s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2024-11-21T23:13:39.826408Z 5 05h45m00.118944s :BS_NODE DEBUG: [5] VDiskId# [80000007:2:0:0:0] -> [80000007:3:0:0:0] 2024-11-21T23:13:39.826483Z 23 05h45m00.118944s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.826566Z 8 05h45m00.118944s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2024-11-21T23:13:39.826618Z 8 05h45m00.118944s :BS_NODE DEBUG: [8] VDiskId# [80000007:2:0:1:0] -> [80000007:3:0:1:0] 2024-11-21T23:13:39.826715Z 27 05h45m00.118944s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 
2024-11-21T23:13:39.826766Z 27 05h45m00.118944s :BS_NODE DEBUG: [27] VDiskId# [80000007:2:2:1:0] -> [80000007:3:2:1:0] 2024-11-21T23:13:39.826862Z 11 05h45m00.118944s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2024-11-21T23:13:39.826908Z 11 05h45m00.118944s :BS_NODE DEBUG: [11] VDiskId# [80000007:2:0:2:0] -> [80000007:3:0:2:0] 2024-11-21T23:13:39.827005Z 29 05h45m00.118944s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2024-11-21T23:13:39.827060Z 29 05h45m00.118944s :BS_NODE DEBUG: [29] VDiskId# [80000007:2:2:0:0] -> [80000007:3:2:0:0] 2024-11-21T23:13:39.827158Z 14 05h45m00.118944s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2024-11-21T23:13:39.827200Z 14 05h45m00.118944s :BS_NODE DEBUG: [14] VDiskId# [80000007:3:1:2:0] PDiskId# 1002 VSlotId# 1016 created 2024-11-21T23:13:39.827277Z 14 05h45m00.118944s :BS_NODE DEBUG: [14] VDiskId# [80000007:3:1:2:0] status changed to INIT_PENDING 2024-11-21T23:13:39.835413Z 14 05h45m01.298944s :BS_NODE DEBUG: [14] VDiskId# [80000037:3:1:2:0] status changed to REPLICATING 2024-11-21T23:13:39.836199Z 14 05h45m01.931944s :BS_NODE DEBUG: [14] VDiskId# [80000077:3:1:2:0] status changed to REPLICATING 2024-11-21T23:13:39.836970Z 14 05h45m02.073944s :BS_NODE DEBUG: [14] VDiskId# [80000007:3:1:2:0] status changed to REPLICATING 2024-11-21T23:13:39.838140Z 24 05h45m02.249944s :BS_NODE DEBUG: [24] VDiskId# [80000017:3:1:2:0] status changed to REPLICATING 2024-11-21T23:13:39.838648Z 24 05h45m03.028944s :BS_NODE DEBUG: [24] VDiskId# [80000047:3:1:2:0] status changed to REPLICATING 2024-11-21T23:13:39.842090Z 24 05h45m03.837944s :BS_NODE DEBUG: [24] VDiskId# [80000038:5:1:0:0] status changed to REPLICATING 2024-11-21T23:13:39.842712Z 24 05h45m04.158944s :BS_NODE DEBUG: [24] VDiskId# [80000067:3:1:2:0] status changed to REPLICATING 2024-11-21T23:13:39.843595Z 14 05h45m04.619944s :BS_NODE DEBUG: [14] VDiskId# [80000027:3:1:2:0] status changed to REPLICATING 2024-11-21T23:13:39.846011Z 14 05h45m06.078944s :BS_NODE DEBUG: [14] VDiskId# [80000057:3:1:2:0] status changed to REPLICATING 2024-11-21T23:13:39.846873Z 24 05h45m09.468944s :BS_NODE DEBUG: [24] VDiskId# [80000047:3:1:2:0] status changed to READY 2024-11-21T23:13:39.848674Z 23 05h45m09.469456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.848753Z 23 05h45m09.469456s :BS_NODE DEBUG: [23] VDiskId# [80000047:2:1:2:0] destroyed 2024-11-21T23:13:39.849501Z 24 05h45m12.947944s :BS_NODE DEBUG: [24] VDiskId# [80000067:3:1:2:0] status changed to READY 2024-11-21T23:13:39.850855Z 23 05h45m12.948456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.850935Z 23 05h45m12.948456s :BS_NODE DEBUG: [23] VDiskId# [80000067:2:1:2:0] destroyed 2024-11-21T23:13:39.852072Z 14 05h45m16.696944s :BS_NODE DEBUG: [14] VDiskId# [80000037:3:1:2:0] status changed to READY 2024-11-21T23:13:39.853862Z 23 05h45m16.697456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.853929Z 23 05h45m16.697456s :BS_NODE DEBUG: [23] VDiskId# [80000037:2:1:2:0] destroyed 2024-11-21T23:13:39.854171Z 14 05h45m19.676944s :BS_NODE DEBUG: [14] VDiskId# [80000057:3:1:2:0] status changed to READY 2024-11-21T23:13:39.855395Z 23 05h45m19.677456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.855461Z 23 05h45m19.677456s :BS_NODE DEBUG: [23] VDiskId# [80000057:2:1:2:0] destroyed 2024-11-21T23:13:39.855661Z 24 05h45m19.864944s :BS_NODE DEBUG: [24] VDiskId# [80000038:5:1:0:0] status changed to READY 2024-11-21T23:13:39.856679Z 23 05h45m19.865456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.856737Z 23 
05h45m19.865456s :BS_NODE DEBUG: [23] VDiskId# [80000038:4:1:0:0] destroyed 2024-11-21T23:13:39.857866Z 14 05h45m25.108944s :BS_NODE DEBUG: [14] VDiskId# [80000077:3:1:2:0] status changed to READY 2024-11-21T23:13:39.859351Z 23 05h45m25.109456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.859411Z 23 05h45m25.109456s :BS_NODE DEBUG: [23] VDiskId# [80000077:2:1:2:0] destroyed 2024-11-21T23:13:39.860950Z 14 05h45m33.068944s :BS_NODE DEBUG: [14] VDiskId# [80000027:3:1:2:0] status changed to READY 2024-11-21T23:13:39.862269Z 23 05h45m33.069456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.862331Z 23 05h45m33.069456s :BS_NODE DEBUG: [23] VDiskId# [80000027:2:1:2:0] destroyed 2024-11-21T23:13:39.863563Z 14 05h45m33.483944s :BS_NODE DEBUG: [14] VDiskId# [80000007:3:1:2:0] status changed to READY 2024-11-21T23:13:39.864826Z 23 05h45m33.484456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.864886Z 23 05h45m33.484456s :BS_NODE DEBUG: [23] VDiskId# [80000007:2:1:2:0] destroyed 2024-11-21T23:13:39.865058Z 24 05h45m33.652944s :BS_NODE DEBUG: [24] VDiskId# [80000017:3:1:2:0] status changed to READY 2024-11-21T23:13:39.866067Z 23 05h45m33.653456s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2024-11-21T23:13:39.866122Z 23 05h45m33.653456s :BS_NODE DEBUG: [23] VDiskId# [80000017:2:1:2:0] destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [GOOD] Test command err: 2024-11-21T23:10:28.452469Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873896961359508:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:28.462806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:28.621755Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c21/r3tmp/tmpb0Gj2h/pdisk_1.dat 2024-11-21T23:10:28.826988Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17503, node 1 2024-11-21T23:10:28.871160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:28.871258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:28.939020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:29.023127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002c21/r3tmp/yandexYWfbhy.tmp 2024-11-21T23:10:29.023170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002c21/r3tmp/yandexYWfbhy.tmp 2024-11-21T23:10:29.023324Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002c21/r3tmp/yandexYWfbhy.tmp 2024-11-21T23:10:29.023415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:29.094438Z INFO: TTestServer started on Port 21187 GrpcPort 17503 TClient is connected to server localhost:21187 PQClient connected to localhost:17503 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:29.416417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:29.435398Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:29.456931Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:29.466481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:10:31.963417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873909846262017:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:31.963551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:31.965185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873909846262044:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:31.975547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:32.013344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873909846262090:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:32.014227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:32.014359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T23:10:32.014579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873909846262055:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:32.238634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.265000Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873914141229420:2319], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:32.267862Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGExZWU2NmQtZDEyMDUwOGUtNzRjYTczZS0yOTQzMGMyOQ==, ActorId: [1:7439873909846262012:2305], ActorState: ExecuteState, TraceId: 01jd8fvmfkc3zxn6tjn6cwgcjp, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:32.272618Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:32.285549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:32.376524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873914141229700:2621] 2024-11-21T23:10:33.458536Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873896961359508:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:33.500733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:39.005121Z :WriteToTopic_Demo_1 INFO: TTopicSdkTestSetup started 2024-11-21T23:10:39.020669Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:39.045618Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873944206001094:2809] connected; active server actors: 1 2024-11-21T23:10:39.046502Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T23:10:39.047425Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:39.047544Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:39.048565Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:39.056642Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:39.057481Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:39.057688Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:39.057873Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:39.057899Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:39.057920Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:39.057952Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:39.057979Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:39.058011Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:39.058028Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:39.059624Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:39.059636Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:39.059691Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873944206001120:2435], now have 1 active actors on pipe 2024-11-21T23:10:39.086157Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:39.086199Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873944206001092:2808], now have 1 active actors on pipe 2024-11-21T23:10:39.102298Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439873896961359791 RawX2: 4294969495 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 Lifeti ... ession cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 18 prev 12 end 22 by cookie 4 2024-11-21T23:13:34.164694Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T23:13:34.164735Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T23:13:34.164857Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 18 (startOffset 0) session test-consumer_10_1_13055387893047277675_v1 2024-11-21T23:13:34.165008Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T23:13:34.166467Z :DEBUG: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:13:34.166629Z :DEBUG: [/Root] Take Data. Partition 0. Read: {4, 0} (18-18) 2024-11-21T23:13:34.166666Z :DEBUG: [/Root] Take Data. Partition 0. Read: {4, 1} (19-19) 2024-11-21T23:13:34.166698Z :DEBUG: [/Root] Take Data. Partition 0. Read: {4, 2} (20-20) 2024-11-21T23:13:34.166726Z :DEBUG: [/Root] Take Data. Partition 0. Read: {4, 3} (21-21) 2024-11-21T23:13:34.166215Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:13:34.166274Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 4 2024-11-21T23:13:34.166771Z :DEBUG: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] [] The application data is transferred to the client. Number of messages 4, size 400000 bytes 2024-11-21T23:13:34.166358Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2024-11-21T23:13:34.166422Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 18 endOffset 22 with cookie 4 2024-11-21T23:13:34.166802Z :DEBUG: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:13:34.166465Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 18 0 4 2024-11-21T23:13:34.166994Z :DEBUG: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] [] Commit offsets [18, 22). Partition stream id: 1 2024-11-21T23:13:34.167184Z :DEBUG: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 18 } } 2024-11-21T23:13:34.167570Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 18 end: 22 } } } } 2024-11-21T23:13:34.167733Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 22 prev 18 end 22 by cookie 5 2024-11-21T23:13:34.168079Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T23:13:34.168112Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T23:13:34.168218Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 22 (startOffset 0) session test-consumer_10_1_13055387893047277675_v1 2024-11-21T23:13:34.168352Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T23:13:34.169651Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 22 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:13:34.169707Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:13:34.169759Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 5 2024-11-21T23:13:34.169838Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 5 } 2024-11-21T23:13:34.169873Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 22 endOffset 22 with cookie 5 2024-11-21T23:13:34.169913Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 replying for commits: assignId# 1, from# 5, to# 5, offset# 22 2024-11-21T23:13:34.170616Z :DEBUG: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 22 } } 2024-11-21T23:13:34.859533Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:21:22 2024-11-21T23:13:34.859639Z :INFO: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1001 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:13:34.871065Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 checking auth because of timeout 2024-11-21T23:13:34.871153Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 auth for : test-consumer 2024-11-21T23:13:34.872104Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 Handle describe topics response 2024-11-21T23:13:34.872225Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 auth is DEAD 2024-11-21T23:13:34.872330Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 auth ok: topics# 1, initDone# 1 2024-11-21T23:13:34.911110Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:13:34.911141Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:35.872050Z :INFO: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] Closing read session. 
Close timeout: 0.000000s 2024-11-21T23:13:35.872105Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:21:22 2024-11-21T23:13:35.872160Z :INFO: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2014 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:13:35.872276Z :NOTICE: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:13:35.872324Z :DEBUG: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] [] Abort session to cluster 2024-11-21T23:13:35.873372Z :NOTICE: [/Root] [/Root] [6c9a1ffa-91c0b1b2-fe82ed89-550c400d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:13:35.873565Z node 10 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 grpc read done: success# 0, data# { } 2024-11-21T23:13:35.873621Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 grpc read failed 2024-11-21T23:13:35.873659Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 closed 2024-11-21T23:13:35.873720Z node 10 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_10_1_13055387893047277675_v1 is DEAD 2024-11-21T23:13:35.873978Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:35.874011Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_10_1_13055387893047277675_v1 2024-11-21T23:13:35.874034Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439874692338213464:2525] destroyed 2024-11-21T23:13:35.874074Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_10_1_13055387893047277675_v1 2024-11-21T23:13:35.874120Z node 10 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [10:7439874692338213461:2522] disconnected; active server actors: 1 2024-11-21T23:13:35.874149Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [10:7439874692338213461:2522] client test-consumer disconnected session test-consumer_10_1_13055387893047277675_v1 2024-11-21T23:13:35.874739Z :INFO: [/Root] SessionId [test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0] PartitionId [0] Generation [2] Write session: close. 
Timeout 0.000000s 2024-11-21T23:13:35.874776Z :INFO: [/Root] SessionId [test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0] PartitionId [0] Generation [2] Write session will now close 2024-11-21T23:13:35.875045Z :DEBUG: [/Root] SessionId [test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0] PartitionId [0] Generation [2] Write session: aborting 2024-11-21T23:13:35.875604Z :INFO: [/Root] SessionId [test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2024-11-21T23:13:35.875642Z :DEBUG: [/Root] SessionId [test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0] PartitionId [0] Generation [2] Write session: destroy 2024-11-21T23:13:35.876645Z node 10 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0 grpc read done: success: 0 data: 2024-11-21T23:13:35.876681Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0 grpc read failed 2024-11-21T23:13:35.876726Z node 10 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0 2024-11-21T23:13:35.876743Z node 10 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|e3e8ba8d-4ca32d8b-8cdd7165-41819ba4_0 is DEAD 2024-11-21T23:13:35.877144Z node 10 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:13:35.877270Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:35.877316Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [10:7439874688043246077:2496] destroyed 2024-11-21T23:13:35.877370Z node 10 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
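Note on the SCHEME_ERROR entries above ("Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' ..."): they are produced while the PERSQUEUE cluster tracker polls the cluster-config table before the test harness has created it; the harness then seeds the table with the "=== Init DC:" statement logged earlier in this block. For readability, that statement is repeated here as formatted YQL, followed by a sketch of the kind of read that fails while the table is still missing. The UPSERT is verbatim from the log; the SELECT is only an assumption reconstructed from the UPSERT's column list, not a query captured in the log.

-- Cluster bootstrap, verbatim from the "=== Init DC:" log line above (reformatted):
UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES
    ("dc1", "localhost",                true,  true, 1000),
    ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);

-- Assumed shape of the cluster-tracker read that returns SCHEME_ERROR before the
-- bootstrap above has run (the literal query text does not appear in the log):
SELECT name, balancer, local, enabled, weight
FROM `/Root/PQ/Config/V2/Cluster`;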
>> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning >> TSettingsValidation::TestDifferentDedupParams [GOOD] >> TSchemeShardTest::RejectSystemViewPath [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table |84.3%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TSettingsValidation::TestDifferentDedupParams [GOOD] Test command err: 2024-11-21T23:10:29.291007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873902648745062:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:29.291050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:29.661297Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c08/r3tmp/tmp3Tt7Yp/pdisk_1.dat 2024-11-21T23:10:29.987170Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:10:30.012317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:30.014923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:30.025081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18786, node 1 2024-11-21T23:10:30.231080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002c08/r3tmp/yandexqZizm0.tmp 2024-11-21T23:10:30.231103Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002c08/r3tmp/yandexqZizm0.tmp 2024-11-21T23:10:30.233523Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002c08/r3tmp/yandexqZizm0.tmp 2024-11-21T23:10:30.233615Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:30.350246Z INFO: TTestServer started on Port 10803 GrpcPort 18786 TClient is connected to server localhost:10803 PQClient connected to localhost:18786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:10:30.775887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:30.804228Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:30.829807Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:30.850251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:10:31.012730Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:10:33.135396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873919828614898:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.135541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.136025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873919828614910:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:33.146205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:33.172622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873919828614912:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:33.485881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:33.502098Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873919828614986:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:33.512594Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGQ5YmVjMDYtZWQ2YzZhYzUtYjg2MWQ4OGUtMTcyMWMxYzM=, ActorId: [1:7439873919828614895:2307], ActorState: ExecuteState, TraceId: 01jd8fvnkt2maewgt7d10xym84, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:33.514868Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:33.572895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:33.708544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873924123582562:2624] 2024-11-21T23:10:34.292834Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873902648745062:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:34.292899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:40.061007Z :Restarts INFO: TTopicSdkTestSetup started 2024-11-21T23:10:40.081038Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:40.100147Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873949893386664:2811] connected; active server actors: 1 2024-11-21T23:10:40.100776Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T23:10:40.106893Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:40.107040Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:40.108467Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:40.114677Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:40.115653Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:40.115905Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:40.116108Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:40.116140Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:40.116158Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:40.116178Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:40.116211Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:40.116253Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:40.116271Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:40.117488Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:40.117557Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873949893386685:2437], now have 1 active actors on pipe 2024-11-21T23:10:40.117578Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:40.141557Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:40.141603Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873949893386660:2808], now have 1 active actors on pipe 2024-11-21T23:10:40.141659Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T23:10:40.166201Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439873902648745325:2189] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "te ... 
3d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 1 2024-11-21T23:13:47.094528Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.094651Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 2 written { offset: 1551 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 10000000 } max_queue_wait_time { nanos: 10000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.094675Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=2, txId=? 2024-11-21T23:13:47.094700Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 2 2024-11-21T23:13:47.094936Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.095031Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 3 written { offset: 1552 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 5000000 } max_queue_wait_time { nanos: 5000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.095122Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=3, txId=? 2024-11-21T23:13:47.095142Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 3 2024-11-21T23:13:47.095343Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.095429Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 4 written { offset: 1553 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 5000000 } max_queue_wait_time { nanos: 5000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.095448Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=4, txId=? 
2024-11-21T23:13:47.095467Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 4 2024-11-21T23:13:47.095674Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.095752Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 5 written { offset: 1554 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 5000000 } max_queue_wait_time { nanos: 5000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.095773Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=5, txId=? 2024-11-21T23:13:47.095789Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 5 2024-11-21T23:13:47.095965Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.096042Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 6 written { offset: 1555 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 5000000 } max_queue_wait_time { nanos: 5000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.096058Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=6, txId=? 2024-11-21T23:13:47.096075Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 6 2024-11-21T23:13:47.096248Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.096320Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 7 written { offset: 1556 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 5000000 } max_queue_wait_time { nanos: 5000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.096337Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=7, txId=? 
2024-11-21T23:13:47.096353Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 7 2024-11-21T23:13:47.096527Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.096591Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 8 written { offset: 1557 } } write_statistics { persisting_time { nanos: 6000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.096604Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=8, txId=? 2024-11-21T23:13:47.096619Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 8 2024-11-21T23:13:47.096757Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.096811Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 9 written { offset: 1558 } } write_statistics { persisting_time { nanos: 6000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.096825Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=9, txId=? 2024-11-21T23:13:47.096839Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 9 2024-11-21T23:13:47.096966Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.097020Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 10 written { offset: 1559 } } write_statistics { persisting_time { nanos: 6000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.097033Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=10, txId=? 
2024-11-21T23:13:47.097048Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 10 2024-11-21T23:13:47.097167Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:13:47.097224Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 11 written { offset: 1560 } } write_statistics { persisting_time { nanos: 6000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:13:47.097237Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] OnAck: seqNo=11, txId=? 2024-11-21T23:13:47.097251Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: acknoledged message 11 2024-11-21T23:13:47.097783Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write 1 messages with Id from 771 to 771 2024-11-21T23:13:47.097850Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: try to update token 2024-11-21T23:13:47.097873Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 771 2024-11-21T23:13:47.097961Z :INFO: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2024-11-21T23:13:47.098000Z :INFO: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T23:13:47.098059Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T23:13:47.098288Z :WARNING: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T23:13:47.102001Z :DEBUG: [/Root] SessionId [1922523d-58c07162-d9e1e866-d8f44419|e7c64a70-1bc5f2b9-ccc2f8f9-a631a314_0] PartitionId [0] Generation [1] Write session: destroy === === END TEST (supposed ok)=== === >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> RetryPolicy::RetryWithBatching [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> KqpSystemView::QueryStatsSimple [GOOD] |84.3%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |84.3%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |84.3%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |84.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 6609, MsgBus: 63816 2024-11-21T23:12:20.524331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874377941978124:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:20.524424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:20.562844Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874378962489371:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:20.562897Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:20.626671Z node 3 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439874381065532575:2248];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fe6/r3tmp/tmp7bfvSt/pdisk_1.dat 2024-11-21T23:12:20.900854Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:21.104739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:21.104906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:21.105090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:21.105157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:21.105596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:21.105644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:21.111808Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:12:21.111855Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:12:21.112048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:21.112509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:21.113366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:21.129476Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6609, node 1 2024-11-21T23:12:21.190613Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:21.190661Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:12:21.266318Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:21.266342Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:21.266354Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:21.266453Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63816 TClient is connected to server localhost:63816 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:12:22.085412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:22.146139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:22.301492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:22.633302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:22.730482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:12:25.127481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874395121849314:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:25.142853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:25.196720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:12:25.303848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:12:25.362536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:12:25.526115Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874377941978124:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:25.526176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:25.534591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:12:25.566519Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874378962489371:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:25.566584Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:25.581065Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439874381065532575:2248];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:12:25.581119Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:12:25.670785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:12:25.768855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:12:25.931011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874399416817299:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:25.931105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:25.931387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874399416817304:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:25.952138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:12:25.989346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874399416817306:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:12:28.333766Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230748302, txId: 281474976710673] shutting down 2024-11-21T23:12:28.639927Z node 3 :BS_PROXY_PUT ERROR: [5db961e63887b866] Result# TEvPutResult {Id# [72075186224037894:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:12:28.679963Z node 2 :BS_PROXY_PUT ERROR: [5e425ec63c20d5d3] Result# TEvPutResult {Id# [72075186224037895:1:22:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Trying to start YDB, gRPC: 25069, MsgBus: 23691 2024-11-21T23:12:32.351313Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439874431908074223:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fe6/r3tmp/tmpfy3Amq/pdisk_1.dat 2024-11-21T23:12:32.618278Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:12:32.655562Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/in ... ction 281474976710668 completed, doublechecking } 2024-11-21T23:13:33.670038Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230813658, txId: 281474976710673] shutting down 2024-11-21T23:13:33.979130Z node 14 :BS_PROXY_PUT ERROR: [a1e5308ca0ba5be2] Result# TEvPutResult {Id# [72075186224037891:1:21:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:13:33.990519Z node 15 :BS_PROXY_PUT ERROR: [5249564b65dd9c66] Result# TEvPutResult {Id# [72075186224037889:1:21:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Trying to start YDB, gRPC: 31748, MsgBus: 29338 2024-11-21T23:13:37.544231Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7439874710672847677:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:37.545351Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:13:37.595649Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7439874712410028414:2069];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:37.599270Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7439874709638587782:2089];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:37.600367Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2024-11-21T23:13:37.602652Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fe6/r3tmp/tmpYVl3nc/pdisk_1.dat 2024-11-21T23:13:38.014787Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:38.090689Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:38.090821Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:38.092266Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:38.092359Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:38.093253Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:13:38.093317Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:13:38.095620Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:38.107145Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2024-11-21T23:13:38.107196Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2024-11-21T23:13:38.108202Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:13:38.113479Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31748, node 16 2024-11-21T23:13:38.271065Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:13:38.271090Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:13:38.271102Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:13:38.271238Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29338 TClient is connected to server localhost:29338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:13:39.316117Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:39.357435Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:39.693890Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:39.889230Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:40.018319Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:13:42.542293Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7439874710672847677:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:42.542421Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:13:42.595301Z node 17 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7439874712410028414:2069];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:42.595409Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:13:42.599776Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7439874709638587782:2089];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:13:42.599854Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:13:44.396354Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439874740737620759:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:44.407016Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:44.452555Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:13:44.619323Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:13:45.001015Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:13:45.211656Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:13:45.360008Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:13:45.643241Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:13:45.809261Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439874745032588758:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:45.809386Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:45.809899Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439874745032588763:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:13:45.828610Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:13:45.879757Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439874745032588765:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:13:50.643051Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230830608, txId: 281474976715673] shutting down 2024-11-21T23:13:51.005307Z node 17 :BS_PROXY_PUT ERROR: [ce0d7a51fe06403f] Result# TEvPutResult {Id# [72075186224037891:1:26:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2024-11-21T23:13:51.027641Z node 18 :BS_PROXY_PUT ERROR: [da6d3c503018343e] Result# TEvPutResult {Id# [72075186224037893:1:26:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2024-11-21T23:07:43.880552Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:43.880584Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:43.880608Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:07:43.881371Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:07:43.881445Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:43.881477Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:43.882817Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007923s 2024-11-21T23:07:43.884033Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:07:43.884070Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:43.884099Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:43.884143Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006469s 2024-11-21T23:07:43.884571Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T23:07:43.884609Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:43.884634Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:07:43.884705Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009935s 2024-11-21T23:07:43.896396Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1732230463896362 2024-11-21T23:07:44.398724Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873195203256427:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:44.419794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0013a4/r3tmp/tmpfcbM4y/pdisk_1.dat 2024-11-21T23:07:44.820531Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:44.833438Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:07:44.934346Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:07:45.306971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:45.307072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:45.317246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:45.317964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:07:45.318015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:07:45.399965Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:07:45.435378Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:07:45.436314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:07:45.438833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 29065, node 1 2024-11-21T23:07:45.459035Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:45.459062Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:07:45.836666Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0013a4/r3tmp/yandex4NJIah.tmp 2024-11-21T23:07:45.836689Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0013a4/r3tmp/yandex4NJIah.tmp 2024-11-21T23:07:45.836826Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0013a4/r3tmp/yandex4NJIah.tmp 2024-11-21T23:07:45.836918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:07:45.968376Z INFO: TTestServer started on Port 7157 GrpcPort 29065 TClient is connected to server 
localhost:7157 PQClient connected to localhost:29065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:07:46.510871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:07:49.366725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873195203256427:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:07:49.366795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:07:50.721681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873221453976226:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:50.721956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873221453976193:2282], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:50.722102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:07:50.729953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:07:50.757694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873221453976231:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:07:51.162900Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873221453976266:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:51.164715Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODViYTk2NzMtOTM3N2ZjYjgtMmUxZDkwMjItNDJlNjM3NDM=, ActorId: [2:7439873221453976190:2280], ActorState: ExecuteState, TraceId: 01jd8fpq1q1sw45qnj9yvxfyx4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:51.167583Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:51.168428Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873220973061306:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:07:51.170145Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDI1NGQ4OTMtNTY0ODc4NmQtOTA5Mjk1NjEtZTNiM2M2Y2I=, ActorId: [1:7439873220973061274:2306], ActorState: ExecuteState, TraceId: 01jd8fpq6b5ttym911e1a060dt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:07:51.173515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:07:51.178644Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:07:51.531879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:07:51.899173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:29065", true, true, 1000); 2024-11-21T23:07:52.397978Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8fpreg7mbj5 ... DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2024-11-21T23:13:49.986712Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2024-11-21T23:13:49.986753Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2024-11-21T23:13:49.988044Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1732230829985 2024-11-21T23:13:49.988455Z node 17 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T23:13:50.015736Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 2024-11-21T23:13:50.015822Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.015902Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T23:13:50.015938Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.015976Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T23:13:50.015997Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.016031Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T23:13:50.016052Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.030788Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 29 queued_in_partition_duration_ms: 1 } 2024-11-21T23:13:50.030872Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 1 2024-11-21T23:13:50.030961Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 2 2024-11-21T23:13:50.030990Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 3 2024-11-21T23:13:50.031098Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 4 2024-11-21T23:13:50.031127Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 5 2024-11-21T23:13:50.031153Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 6 
2024-11-21T23:13:50.031177Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 7 2024-11-21T23:13:50.031198Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 8 2024-11-21T23:13:50.031239Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 9 2024-11-21T23:13:50.031269Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: acknoledged message 10 2024-11-21T23:13:50.016085Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2024-11-21T23:13:50.038888Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: close. Timeout = 0 ms 2024-11-21T23:13:50.038963Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session will now close 2024-11-21T23:13:50.039024Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: aborting 2024-11-21T23:13:50.016105Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.016139Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2024-11-21T23:13:50.016160Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.016196Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2024-11-21T23:13:50.016233Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.016266Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2024-11-21T23:13:50.016289Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.016324Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2024-11-21T23:13:50.016345Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T23:13:50.016392Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2024-11-21T23:13:50.016422Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:13:50.016458Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2024-11-21T23:13:50.016712Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:13:50.016751Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T23:13:50.016977Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T23:13:50.017142Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T23:13:50.017701Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2024-11-21T23:13:50.017748Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T23:13:50.018179Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2024-11-21T23:13:50.018213Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T23:13:50.018318Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732230829984 queuesize 0 startOffset 0 2024-11-21T23:13:50.071216Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:13:50.071294Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0] Write session: destroy 2024-11-21T23:13:50.094368Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0 grpc read done: success: 0 data: 2024-11-21T23:13:50.094433Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0 grpc read failed 2024-11-21T23:13:50.094471Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0 grpc closed 2024-11-21T23:13:50.094501Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|811a45a7-a6b3b7b9-bbc42cbd-67555f8c_0 is DEAD 2024-11-21T23:13:50.095676Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:13:50.100349Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:13:50.100420Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7439874761106367927:2612] destroyed 2024-11-21T23:13:50.100827Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
>> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.762985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.763817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.763868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.763916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.763958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.763998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.764056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.880095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.880152Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.919999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.924588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.924766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.959776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.963595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.968359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.983163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.996510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.996612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a 
bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.996683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.996812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.011736Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.123651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.124767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.138539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.142550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.142679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.145626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.148077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.148376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.148447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.148491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.148534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.159735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.159804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.159847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.162354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.162440Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.162517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.162650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2024-11-21T23:13:34.171152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.173940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.174133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.175335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.175480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.175536Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.175980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.176069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.176256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.176352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.178521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.178568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.178751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.178809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.179180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.179230Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.179337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.179387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.179435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.179492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2024-11-21T23:13:34.179531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:34.179589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.179649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.179688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.179755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.181880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.182028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.182074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.182132Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.182183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.182292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... : 104:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 2, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T23:13:56.685554Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2024-11-21T23:13:56.685584Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2024-11-21T23:13:56.686363Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2024-11-21T23:13:56.686754Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2024-11-21T23:13:56.686837Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000005 2024-11-21T23:13:56.686880Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T23:13:56.686909Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2024-11-21T23:13:56.686941Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 can't persist state: ShardsInProgress is not empty, remain: 1 
2024-11-21T23:13:56.697789Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:13:56.698110Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:13:56.698299Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:13:56.698487Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T23:13:56.738721Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T23:13:56.738810Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T23:13:56.739539Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:13:56.739620Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T23:13:56.739684Z node 13 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:13:56.793313Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409552, partId: 0 2024-11-21T23:13:56.793534Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000005 2024-11-21T23:13:56.793656Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000005 2024-11-21T23:13:56.793735Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2024-11-21T23:13:56.793789Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId#104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2024-11-21T23:13:56.794023Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T23:13:56.794313Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T23:13:56.797504Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:13:56.798427Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:56.798519Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:13:56.799000Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:56.799074Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:202:2205], at schemeshard: 
72057594046678944, txId: 104, path id: 2 2024-11-21T23:13:56.799992Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:13:56.800073Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T23:13:56.800271Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T23:13:56.800335Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:13:56.800416Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2024-11-21T23:13:56.800494Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:13:56.800573Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T23:13:56.800629Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T23:13:56.800942Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2024-11-21T23:13:56.801034Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 1 2024-11-21T23:13:56.801096Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T23:13:56.802011Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:13:56.802165Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:13:56.802228Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2024-11-21T23:13:56.802308Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T23:13:56.802591Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T23:13:56.802739Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2024-11-21T23:13:56.802801Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [13:433:2389] 2024-11-21T23:13:56.807428Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T23:13:56.807778Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:13:56.807859Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1551:3355] TestWaitNotification: OK eventTxId 104 2024-11-21T23:13:56.832269Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2024-11-21T23:13:56.832557Z node 13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 343us result status StatusSuccess 2024-11-21T23:13:56.833295Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false |84.3%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::PathErrors >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |84.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup |84.4%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> BasicUsage::ConflictingWrites [GOOD] >> BasicUsage::TWriteSession_WriteEncoded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.763246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.763874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.763988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.764057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.764099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.764132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.764193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.768453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.886163Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2024-11-21T23:13:33.886219Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.923313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.927718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.927900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.961324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.963888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.968257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.978139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.987205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.987278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.998650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.998750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.998808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.998942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.019460Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.176683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.176884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.177098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.177304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.177356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.182478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.182627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2024-11-21T23:13:34.182825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.182882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.182930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.182962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.186884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.186943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.186979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.191678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.191740Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.191792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.191839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.194718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.198468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.198671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.199790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.199914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.199969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.200289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.200357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.200534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.200604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.202672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.202719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.202874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.202930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.203266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.203326Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.203416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.203462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.203507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.203544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.203595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:34.203632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.203691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.203738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.203776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.205529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.205661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.205701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.205736Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.205770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.205855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T23:14:00.310047Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2024-11-21T23:14:00.310095Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T23:14:00.344879Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:14:00.345172Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:14:00.345261Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:14:00.345348Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T23:14:00.345458Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T23:14:00.345698Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:14:00.351980Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T23:14:00.352176Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T23:14:00.352661Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:00.352876Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 64424511593 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:00.352977Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T23:14:00.353491Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T23:14:00.353594Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T23:14:00.353938Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:14:00.354056Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:14:00.354138Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:14:00.358807Z node 15 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:00.358863Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:00.359051Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:14:00.359203Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:00.359248Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:14:00.359296Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:14:00.359652Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:14:00.359724Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:14:00.359951Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:14:00.360020Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:14:00.360110Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:14:00.360185Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:14:00.360283Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:14:00.360343Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:14:00.360572Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:14:00.360657Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T23:14:00.360721Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T23:14:00.360781Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T23:14:00.363416Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:14:00.363544Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:14:00.363602Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:14:00.363694Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:14:00.363766Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T23:14:00.365222Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:14:00.365325Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:14:00.365360Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:14:00.365398Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:14:00.365438Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:14:00.365541Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:14:00.380476Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:14:00.384991Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:14:00.385425Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T23:14:00.385517Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T23:14:00.386124Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T23:14:00.386282Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:14:00.386350Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [15:404:2372] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2024-11-21T23:14:00.398147Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "Topic1" TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 121 } MeteringMode: METERING_MODE_RESERVED_CAPACITY } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:14:00.398699Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /MyRoot/USER_1/Topic1, opId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:00.399127Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, at schemeshard: 72057594046678944 2024-11-21T23:14:00.402807Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_1/Topic1\', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, 
delta: 363 bytes" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:00.403144Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_1, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, operation: CREATE PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:14:00.403592Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:14:00.403670Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:14:00.404297Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:14:00.404448Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:14:00.404520Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:411:2379] TestWaitNotification: OK eventTxId 102 >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::ManyDirs >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] 
[GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.763005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.763810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.763875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.763930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.763990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.764043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.764110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.887697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.887752Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.921069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.925031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.925198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.959720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.965087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.968352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.985974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.987337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.987414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.996997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.997088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.997159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.997263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.021288Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.170170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.170384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.170701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.170935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.170984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.176165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.176330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.176515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.176624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.176664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.176697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.178655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.178710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.178759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.180818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.180890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.180938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.181012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.184605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send 
propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.187202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.187385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.188428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.188558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.188609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.188914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.188981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.189136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.189277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.191808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.191852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.192056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.192099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.192490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.192546Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.192646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.192703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.192762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.192805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.192845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2024-11-21T23:13:34.192887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.192949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.192985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.193019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.194830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.194954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.194991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.195030Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.195068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.195159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... Id: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } Children { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:07.135577Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T23:14:07.135888Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 345us result status StatusSuccess 2024-11-21T23:14:07.136605Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:07.137640Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:07.137966Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 346us result status StatusSuccess 2024-11-21T23:14:07.138493Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 
Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:07.139474Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:07.139752Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 308us result status StatusSuccess 2024-11-21T23:14:07.140088Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 
PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:07.141138Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:07.141386Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 301us result status StatusSuccess 2024-11-21T23:14:07.141856Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> 
TSchemeShardTest::AlterIndexTableDirectly |84.4%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |84.4%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |84.4%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.763406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.765208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.765284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.765390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.765437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.765475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.765560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.880100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.880173Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.918582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.923016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.923892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.959975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T23:13:33.963766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.969143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.977559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.996507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.996623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.996683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.996808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.009855Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.164678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.164916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.165126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.165358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.165416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.169268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.169440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.169669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.169741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.169777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.169809Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.171839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.171899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.171982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.173595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.173665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.173714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.173767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.177609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.179369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.179547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.180674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.180813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.180870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.181187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.181258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.181432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.181525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.184095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.184142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.184341Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.184389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.184789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.184838Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.184957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.185008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.185051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.185095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.185126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:34.185161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.185217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.185252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.185348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.187196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.187319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.187359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.187399Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.187445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.187536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
/Table1" took 474us result status StatusSuccess 2024-11-21T23:14:08.526014Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/Table1" PathDescription { Self { Name: "Table1" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 16 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:08.533376Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:08.533742Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" took 409us result status StatusSuccess 2024-11-21T23:14:08.534338Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA" PathDescription { Self { Name: "DirA" PathId: 17 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 16 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } } Children { Name: "DirB" PathId: 18 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } 
Children { Name: "Table2" PathId: 23 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 17 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:08.535472Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:08.535811Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" took 374us result status StatusSuccess 2024-11-21T23:14:08.556754Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 23 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 23 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:08.558269Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:08.565088Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" took 6.81ms result status StatusSuccess 2024-11-21T23:14:08.565671Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 18 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 17 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "Table3" PathId: 24 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 18 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 18 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:08.577286Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:08.577765Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" took 559us result status StatusSuccess 2024-11-21T23:14:08.578302Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_full/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 24 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000009 ParentPathId: 18 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 
NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 27 PathsLimit: 10000 ShardsInside: 15 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 24 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |84.4%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] |84.4%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> TSchemeShardTest::CreateSystemColumn [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus >> KqpQueryService::PeriodicTaskInSessionPool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.763231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.763854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.763904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.763964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.764010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.764062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.764122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.893371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.893425Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.918495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.922295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.923821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.970173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.975237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.975959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.976287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.981109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.989568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.989660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.998534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.998634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.998698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.998822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.016033Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.166268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.166493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.166692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.166895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.166952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.180752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.180915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.181108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.181171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.181210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.181247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.187171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T23:13:34.187271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.187311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.189027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.189089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.189141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.189205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.199434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.201444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.201619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.202745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.202859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.202906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.203202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.203269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.203418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.203493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.205359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.205400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.205548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.205588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.205897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.205943Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.206036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.206092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.206140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.206189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.206235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:34.206270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.206336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.206375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.206429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.208153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.208250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.208284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.208317Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.208354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.208456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
6678944] TDone opId# 102:0 ProgressState 2024-11-21T23:14:12.889523Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:14:12.889600Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:12.889693Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:14:12.889834Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:342:2317] message: TxId: 102 2024-11-21T23:14:12.889948Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:12.890038Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:14:12.890109Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:14:12.890353Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:14:12.895734Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:14:12.895848Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:343:2318] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2024-11-21T23:14:12.900135Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:14:12.900616Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:14:12.901430Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:14:12.901566Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T23:14:12.901643Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:14:12.901730Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:14:12.901875Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:14:12.902148Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:14:12.903091Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:14:12.903189Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:14:12.907176Z node 15 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2024-11-21T23:14:12.907472Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2024-11-21T23:14:12.907857Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:12.907935Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:12.908236Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:14:12.908374Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:12.908466Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 1 2024-11-21T23:14:12.908546Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T23:14:12.909237Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:14:12.909358Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet72057594046678944 2024-11-21T23:14:12.909777Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T23:14:12.911191Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:14:12.911379Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:14:12.911463Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:14:12.911577Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T23:14:12.911661Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:14:12.913093Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:14:12.913189Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 
2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:14:12.913223Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:14:12.913262Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2024-11-21T23:14:12.913302Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:14:12.913399Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T23:14:12.917458Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2024-11-21T23:14:12.917740Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2024-11-21T23:14:12.917834Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T23:14:12.918731Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2024-11-21T23:14:12.962950Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2024-11-21T23:14:12.963283Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T23:14:12.963370Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2024-11-21T23:14:12.963621Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T23:14:12.963707Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2024-11-21T23:14:12.963844Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2024-11-21T23:14:12.964011Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2024-11-21T23:14:12.969137Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:14:12.970002Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:14:12.980329Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:14:12.980632Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2024-11-21T23:14:12.980735Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2024-11-21T23:14:12.980857Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2024-11-21T23:14:13.013413Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2024-11-21T23:14:13.013693Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2024-11-21T23:14:13.013860Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2024-11-21T23:14:13.013903Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> GroupWriteTest::WithRead >> TxUsage::WriteToTopic_Demo_44 [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> GroupWriteTest::TwoTables |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |84.5%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> TSchemeShardTest::CreateDropSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] 2024-11-21T23:12:27.512003Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:27.512082Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:147:2057] recipient: [1:145:2168] Leader for TabletID 72057594037927938 is [1:151:2172] sender: [1:152:2057] recipient: [1:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:177:2057] recipient: [1:14:2061] Captured TEvents::TSystem::Wakeup to 
BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:27.558727Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:27.576204Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2024-11-21T23:12:27.577335Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2024-11-21T23:12:27.579959Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:184:2197] 2024-11-21T23:12:27.582200Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:185:2198] 2024-11-21T23:12:27.584110Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:27.592343Z node 1 :PERSQUEUE INFO: new Cookie default|c7857223-99836a23-8d414350-8d8c9432_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:27.600762Z node 1 :PERSQUEUE INFO: new Cookie default|6b466aa4-7b2889e8-97fcc4fb-ceeac385_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:27.636865Z node 1 :PERSQUEUE INFO: new Cookie default|3cad9ebd-9dfee66e-84bd939c-b48a0d3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:27.644741Z node 1 :PERSQUEUE INFO: new Cookie default|60c7791a-26d5e151-fdc8b316-e23d6409_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:27.651087Z node 1 :PERSQUEUE INFO: new Cookie default|95e7a4df-4ac6316e-53fda343-464832b8_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:27.660385Z node 1 
:PERSQUEUE INFO: new Cookie default|1e950f76-4811487c-9f7def28-435c0a3c_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] 2024-11-21T23:12:28.180216Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:28.180299Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:147:2057] recipient: [2:145:2168] Leader for TabletID 72057594037927938 is [2:151:2172] sender: [2:152:2057] recipient: [2:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Captured TEvents::TSystem::Wakeup to SAUSAGE_CACHE Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:179:2057] recipient: [2:97:2132] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:182:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:183:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to KEYVALUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:28.250327Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:28.250451Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! 
new actor is[2:184:2194] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:261:2057] recipient: [2:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured 
TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:12:29.943522Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:12:29.944685Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2024-11-21T23:12:29.945834Z node 2 :PERSQUEU ... Kikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to BS_GROUP_PROXY_MON Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Captured TEvents::TSystem::Wakeup to TABLET_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to DS_PROXY_NODE_MON_ACTOR Captured TEvents::TSystem::Wakeup to PDISK_ACTOR Captured TEvents::TSystem::Wakeup to EXT_COUNTERS_UPDATER_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_DISK_SPACE_TRACKER Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to BS_ASYNC_LEVEL_INDEX Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [47:290:2283] sender: [47:390:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:101:2057] recipient: [48:99:2133] Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:106:2057] recipient: [48:99:2133] 2024-11-21T23:14:13.319372Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:14:13.319467Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:147:2057] recipient: [48:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:147:2057] recipient: [48:145:2168] Leader for TabletID 72057594037927938 is [48:151:2172] sender: [48:152:2057] recipient: [48:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [48:105:2137] sender: [48:177:2057] recipient: [48:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:13.400492Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:14:13.401731Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2024-11-21T23:14:13.415393Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:184:2197] 2024-11-21T23:14:13.425032Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:184:2197] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR 2024-11-21T23:14:13.433057Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:185:2198] 2024-11-21T23:14:13.444477Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:13.497655Z node 48 :PERSQUEUE INFO: new Cookie default|9bbeefb0-f2268393-355710cf-32574309_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:13.542424Z node 48 :PERSQUEUE INFO: new Cookie default|11488c1-75e75ad8-7f269943-53f0fe28_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:13.706232Z node 48 :PERSQUEUE INFO: new Cookie default|8d0d1f10-4630c3d-78698c58-45512090_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:13.735938Z node 48 :PERSQUEUE INFO: new Cookie default|ad814875-f038cdd7-dd2c4af0-9c92fb1b_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:13.776065Z node 48 :PERSQUEUE INFO: new Cookie default|345ef089-ba89c4f7-f0ee26bb-450b85bc_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:13.806112Z node 48 :PERSQUEUE INFO: new Cookie default|762590b5-4db372ac-4ce812ac-f4455f46_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:101:2057] recipient: [49:99:2133] Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:106:2057] recipient: [49:99:2133] 2024-11-21T23:14:14.918680Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:14:14.918773Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:147:2057] recipient: [49:145:2168] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:147:2057] recipient: [49:145:2168] Leader for TabletID 72057594037927938 is [49:151:2172] sender: [49:152:2057] recipient: [49:145:2168] Captured TEvents::TSystem::Wakeup to SS_PROXY_REQUEST Leader for TabletID 72057594037927937 is [49:105:2137] sender: [49:177:2057] recipient: [49:14:2061] Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_QUEUE_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:14.984968Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:14:14.986096Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2024-11-21T23:14:14.987524Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:184:2197] 2024-11-21T23:14:14.995231Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:184:2197] 2024-11-21T23:14:14.997569Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: 
StateInit] bootstrapping 1 [49:185:2198] 2024-11-21T23:14:15.005515Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:185:2198] Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to NKikimr::NPQ::TPartitionQuoterBase Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to PERSQUEUE_PARTITION_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:15.040194Z node 49 :PERSQUEUE INFO: new Cookie default|ea44c024-a08926f-85060e5c-69acc4e1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:15.075552Z node 49 :PERSQUEUE INFO: new Cookie default|76a55974-5b28d1c2-bd9095fa-b34069e9_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:15.179220Z node 49 :PERSQUEUE INFO: new Cookie default|b32e1cec-b8a17920-3e183a6e-d0d43e80_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:15.193981Z node 49 :PERSQUEUE INFO: new Cookie default|a3804514-14382321-2516a93f-5c8d4cfe_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:15.207373Z node 49 :PERSQUEUE INFO: new Cookie default|ac76815f-6052e4f1-d4745ad4-3bf2ab79_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR 2024-11-21T23:14:15.221405Z node 49 :PERSQUEUE INFO: new Cookie default|7935d7e3-ef70ca33-f78e2f5f-9cfeb78f_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR Captured TEvents::TSystem::Wakeup to BS_PROXY_PUT_ACTOR >> KqpDocumentApi::AllowRead >> KqpQueryService::DdlUser |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects >> KqpQueryService::TableSink_ReplaceDuplicatesOlap >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] |84.5%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> LdapAuthProviderTest::LdapServerIsUnavailable |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |84.5%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.5%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_44 [GOOD] Test command err: 2024-11-21T23:10:35.476963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873928459026556:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:35.477233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:10:35.651217Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bc4/r3tmp/tmpJsZjUE/pdisk_1.dat 2024-11-21T23:10:35.931654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:35.931755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:35.933501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:35.954150Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17200, node 1 2024-11-21T23:10:35.973972Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:10:35.974008Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:10:36.030220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002bc4/r3tmp/yandexrSjWrV.tmp 2024-11-21T23:10:36.030245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002bc4/r3tmp/yandexrSjWrV.tmp 2024-11-21T23:10:36.030442Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002bc4/r3tmp/yandexrSjWrV.tmp 2024-11-21T23:10:36.030541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:36.077036Z INFO: TTestServer started on Port 22563 GrpcPort 17200 TClient is connected to server localhost:22563 PQClient connected to localhost:17200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:36.419552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:10:36.432965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:10:36.461256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:10:36.629282Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:10:39.020265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873945638896392:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:39.020288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873945638896368:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:39.020385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:39.023764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:39.032562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873945638896397:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:39.252269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:39.272933Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873945638896469:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:39.274675Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTBjOGE0MDgtZmRhOTA3ZjAtODc2YTk4MmEtMjUyYjljMWQ=, ActorId: [1:7439873945638896365:2307], ActorState: ExecuteState, TraceId: 01jd8fvvd34zwsagc0y2e5kwje, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:39.276768Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:39.285010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:39.340549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873945638896739:2616] 2024-11-21T23:10:40.450532Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873928459026556:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:40.462878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:45.842771Z :WriteToTopic_Two_WriteSession INFO: TTopicSdkTestSetup started 2024-11-21T23:10:45.863189Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:45.889918Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873971408700846:2808] connected; active server actors: 1 2024-11-21T23:10:45.890249Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T23:10:45.890851Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:45.890992Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:45.892034Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:45.896934Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:45.897655Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:45.897913Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:45.898092Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:45.898129Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:45.898144Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:45.898164Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:45.898191Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:45.898211Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:45.898224Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:45.903420Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:45.903472Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:45.903526Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873971408700867:2437], now have 1 active actors on pipe 2024-11-21T23:10:45.929487Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:45.929537Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873971408700845:2807], now have 1 active actors on pipe 2024-11-21T23:10:45.945227Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 7439873928459026786 RawX2: 4294969468 } TxId: 281474976710673 Config { TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChan ... 14.993267Z :DEBUG: [/Root] Decompression task done. 
Partition/PartitionSessionId: 1 (94-94) 2024-11-21T23:14:14.993296Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:14:14.993329Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (95-95) 2024-11-21T23:14:14.993351Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:14:14.993381Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (96-96) 2024-11-21T23:14:14.993404Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:14:14.993436Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (97-97) 2024-11-21T23:14:14.993460Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:14:14.993509Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (98-98) 2024-11-21T23:14:14.993532Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:14:14.993569Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (99-99) 2024-11-21T23:14:14.993594Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:14:14.993950Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (92-92) 2024-11-21T23:14:14.993999Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (93-93) 2024-11-21T23:14:14.994033Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (94-94) 2024-11-21T23:14:14.994067Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 0} (95-95) 2024-11-21T23:14:14.994104Z :DEBUG: [/Root] Take Data. Partition 0. Read: {4, 0} (96-96) 2024-11-21T23:14:14.994135Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 0} (97-97) 2024-11-21T23:14:14.994165Z :DEBUG: [/Root] Take Data. Partition 0. Read: {6, 0} (98-98) 2024-11-21T23:14:14.994199Z :DEBUG: [/Root] Take Data. Partition 0. Read: {7, 0} (99-99) 2024-11-21T23:14:14.994253Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] The application data is transferred to the client. Number of messages 8, size 8000000 bytes 2024-11-21T23:14:14.994293Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Returning serverBytesSize = 0 to budget 0 8 2024-11-21T23:14:14.994476Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Commit offsets [92, 100). 
Partition stream id: 1 2024-11-21T23:14:14.994839Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 grpc read done: success# 1, data# { read_request { bytes_size: 8001512 } } 2024-11-21T23:14:14.995005Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 got read request: guid# 1eb8ad1c-8398e4f2-4ae66edd-e8f9901d 2024-11-21T23:14:14.995761Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { start: 92 end: 100 } } } } 2024-11-21T23:14:14.995943Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 100 prev 92 end 100 by cookie 8 2024-11-21T23:14:14.996195Z node 9 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2024-11-21T23:14:14.996258Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2024-11-21T23:14:14.996410Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 100 (startOffset 0) session test-consumer_9_1_12041919982368819710_v1 2024-11-21T23:14:14.996582Z node 9 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:14:14.997815Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 100 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:14:14.997882Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:14:14.997936Z node 9 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 8 2024-11-21T23:14:14.998066Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 8 } 2024-11-21T23:14:14.998110Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 100 endOffset 100 with cookie 8 2024-11-21T23:14:14.998159Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 replying for commits: assignId# 1, from# 8, to# 8, offset# 100 2024-11-21T23:14:15.003872Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 100 } } 2024-11-21T23:14:15.269280Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 checking auth because of timeout 2024-11-21T23:14:15.269374Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 auth for : test-consumer 2024-11-21T23:14:15.270279Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 Handle 
describe topics response 2024-11-21T23:14:15.270423Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 auth is DEAD 2024-11-21T23:14:15.270514Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 auth ok: topics# 1, initDone# 1 2024-11-21T23:14:15.755407Z :INFO: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] Closing read session. Close timeout: 0.000000s 2024-11-21T23:14:15.755476Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:99:100 2024-11-21T23:14:15.755533Z :INFO: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] Counters: { Errors: 0 CurrentSessionLifetimeMs: 65086 BytesRead: 100000000 MessagesRead: 100 BytesReadCompressed: 100000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:14:15.755650Z :NOTICE: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:14:15.755702Z :DEBUG: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] [] Abort session to cluster 2024-11-21T23:14:15.756329Z :NOTICE: [/Root] [/Root] [389a697-f479b8-81d0febe-825b0ba7] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:14:15.759723Z node 9 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 grpc read done: success# 0, data# { } 2024-11-21T23:14:15.759766Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 grpc read failed 2024-11-21T23:14:15.759803Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 grpc closed 2024-11-21T23:14:15.759850Z node 9 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_9_1_12041919982368819710_v1 is DEAD 2024-11-21T23:14:15.761663Z node 9 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [9:7439874595508827053:2549] disconnected; active server actors: 1 2024-11-21T23:14:15.761703Z node 9 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [9:7439874595508827053:2549] client test-consumer disconnected session test-consumer_9_1_12041919982368819710_v1 2024-11-21T23:14:15.761818Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:14:15.761840Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_9_1_12041919982368819710_v1 2024-11-21T23:14:15.761870Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439874595508827056:2552] destroyed 2024-11-21T23:14:15.761920Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_9_1_12041919982368819710_v1 2024-11-21T23:14:15.763410Z :INFO: [/Root] SessionId [test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2024-11-21T23:14:15.763459Z :INFO: [/Root] SessionId [test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T23:14:15.763527Z :DEBUG: [/Root] SessionId [test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T23:14:15.775256Z :INFO: [/Root] SessionId [test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T23:14:15.775311Z :DEBUG: [/Root] SessionId [test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T23:14:15.783373Z node 9 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0 grpc read done: success: 0 data: 2024-11-21T23:14:15.783413Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0 grpc read failed 2024-11-21T23:14:15.783446Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0 grpc closed 2024-11-21T23:14:15.783466Z node 9 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message_group_id|e1faa9ba-847ed8e1-b37847a9-cce17c1_0 is DEAD 2024-11-21T23:14:15.784416Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:14:15.784492Z node 9 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:14:15.786678Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:14:15.786722Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439874578328957581:2485] destroyed 2024-11-21T23:14:15.786763Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:14:15.786788Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [9:7439874578328957578:2485] destroyed 2024-11-21T23:14:15.787733Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T23:14:15.798850Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:14:16.424579Z node 9 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvPartitionCounters PartitionId 0 >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |84.5%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false |84.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> TargetDiscoverer::SystemObjects [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost |84.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2024-11-21T23:14:18.822183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874884580410683:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:18.823161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001d17/r3tmp/tmpaR2l4q/pdisk_1.dat 2024-11-21T23:14:19.312109Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:19.335411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2024-11-21T23:14:19.335538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:19.345070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15717 TServer::EnableGrpc on GrpcPort 10992, node 1 2024-11-21T23:14:19.822617Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:19.822652Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:19.822662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:19.822777Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:20.281474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:20.304406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:20.324141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:20.511219Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:14:20.515136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:20.652383Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732230860339, tx_id: 1 } } } 2024-11-21T23:14:20.652420Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2024-11-21T23:14:20.683261Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732230860451, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1732230860549, tx_id: 281474976710659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2024-11-21T23:14:20.683298Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2024-11-21T23:14:22.610533Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732230860451, tx_id: 281474976710658 } } } 2024-11-21T23:14:22.610560Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2024-11-21T23:14:22.610575Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> JsonChangeRecord::Heartbeat [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlSecret >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.763250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.764739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.764805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.764863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.764910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2024-11-21T23:13:33.764959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.765016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.893199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.893260Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.927024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.931357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.931603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.961877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.963715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.968589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.979552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.986076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.986150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.997639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.997729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.997811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.997924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.008608Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.168108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.168298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.168508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.168738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.168800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.171629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.171836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.172065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.172129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.172177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.172212Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.176970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.177045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.177112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.180724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.180802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.180860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.180938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.184753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.187148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.187383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.188376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.188509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.188614Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.188983Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.189051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.189224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.189325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.194421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.194487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.194653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.194700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.195048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.195115Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.195232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.195278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.195326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.195364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.195395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:34.195431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.195502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.195541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.195590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.197540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.197646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.197683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.197720Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.197756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.197857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... EBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T23:14:23.322040Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:14:23.322125Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:14:23.322166Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:14:23.322233Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T23:14:23.322303Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:14:23.323232Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:14:23.323315Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:14:23.323349Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:14:23.323376Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T23:14:23.323405Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:14:23.323477Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T23:14:23.326645Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:14:23.326725Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:14:23.326757Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:14:23.326789Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:14:23.329260Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2024-11-21T23:14:23.329637Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2024-11-21T23:14:23.330088Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId 
[OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:14:23.330514Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:14:23.330692Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409548 2024-11-21T23:14:23.331812Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:23.332061Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:14:23.333676Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2024-11-21T23:14:23.333948Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409546 2024-11-21T23:14:23.334954Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2024-11-21T23:14:23.335217Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:14:23.335517Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T23:14:23.335734Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T23:14:23.335914Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:14:23.336533Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:14:23.336625Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:14:23.336733Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2024-11-21T23:14:23.342369Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T23:14:23.342628Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2024-11-21T23:14:23.343588Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T23:14:23.343630Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T23:14:23.343830Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2024-11-21T23:14:23.343865Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2024-11-21T23:14:23.344429Z node 15 
:FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T23:14:23.344512Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T23:14:23.344763Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:14:23.345086Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:14:23.345159Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:14:23.345840Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:14:23.345971Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:14:23.346030Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:527:2482] TestWaitNotification: OK eventTxId 103 2024-11-21T23:14:23.346733Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:23.346989Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 315us result status StatusPathDoesNotExist 2024-11-21T23:14:23.347185Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2024-11-21T23:14:23.347783Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T23:14:23.347926Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2024-11-21T23:14:23.347974Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2024-11-21T23:14:23.348065Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2024-11-21T23:14:23.348714Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T23:14:23.348957Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 267us result status StatusSuccess 2024-11-21T23:14:23.349389Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> TMonitoringTests::InvalidActorId |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> ControlImplementationTests::TestRegisterLocalControl [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> GroupWriteTest::TwoTables [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> 
ControlImplementationTests::TestRegisterLocalControl [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> ControlImplementationTests::TestControlWrapperBounds [GOOD] >> KikimrIcGateway::TestLoadExternalTable >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> KqpDocumentApi::AllowRead [GOOD] >> KqpDocumentApi::RestrictAlter >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> BasicUsage::TWriteSession_WriteEncoded [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 17614846306266947587 2024-11-21T23:14:17.331318Z 1 00h00m00.001536s :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.296064s 2024-11-21T23:14:17.333266Z 1 00h00m00.001536s :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.298033s 2024-11-21T23:14:19.316482Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T23:14:19.316589Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T23:14:19.356277Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T23:14:19.356359Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T23:14:19.356473Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T23:14:19.356511Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T23:14:19.361073Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T23:14:19.361198Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T23:14:19.415100Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:19.415221Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage 
{TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:19.421067Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T23:14:19.421178Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T23:14:28.918136Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T23:14:28.918249Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:28.918315Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:28.918351Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T23:14:28.918410Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:28.918455Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:28.918490Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T23:14:28.918529Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:28.918572Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:28.960547Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 
2024-11-21T23:14:28.960661Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2024-11-21T23:14:28.960713Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2024-11-21T23:14:28.960762Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2024-11-21T23:14:28.960811Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2024-11-21T23:14:28.960855Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> ControlImplementationTests::TestControlWrapperBounds [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> KikimrIcGateway::TestListPath >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> TReplicationTests::CreateSequential >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> GroupWriteTest::WithRead [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> TReplicationTests::Create >> KqpQueryService::TableSink_ReplaceDuplicatesOlap [GOOD] >> KqpQueryService::TableSink_Oltp_Replace+UseSink >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 15492519983094957052 2024-11-21T23:14:17.332135Z 1 00h00m00.001536s :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.297369s 2024-11-21T23:14:17.332302Z 1 00h00m00.001536s :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.297563s 2024-11-21T23:14:18.856859Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2024-11-21T23:14:19.027242Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved 
TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2024-11-21T23:14:19.027322Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2024-11-21T23:14:19.069089Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2024-11-21T23:14:19.154960Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:19.163008Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2024-11-21T23:14:31.967829Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T23:14:31.967937Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:31.967997Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2024-11-21T23:14:31.968035Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2024-11-21T23:14:32.102493Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2024-11-21T23:14:32.102599Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |84.6%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> TConsoleTests::TestRestartConsoleAndPools >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> 
TConsoleTests::TestGetUnknownTenantStatus >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> TReplicationTests::Create [GOOD] >> TReplicationTests::CreateDropRecreate >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> KqpQueryService::PeriodicTaskInSessionPool [GOOD] >> KqpQueryService::MaterializeTxResults >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::Alter >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:101:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:106:2057] recipient: [1:99:2133] Leader for TabletID 72057594037927937 is [1:105:2137] sender: [1:139:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:101:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:106:2057] recipient: [2:99:2133] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:139:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:141:2057] recipient: [2:97:2132] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:144:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:105:2137] sender: [2:145:2057] recipient: [2:143:2166] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:147:2057] recipient: [2:143:2166] !Reboot 72057594037927937 (actor [2:105:2137]) rebooted! !Reboot 72057594037927937 (actor [2:105:2137]) tablet resolver refreshed! new actor is[2:146:2167] Leader for TabletID 72057594037927937 is [2:146:2167] sender: [2:216:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:101:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:106:2057] recipient: [3:99:2133] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:139:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:141:2057] recipient: [3:97:2132] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:143:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:105:2137] sender: [3:145:2057] recipient: [3:144:2166] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:147:2057] recipient: [3:144:2166] !Reboot 72057594037927937 (actor [3:105:2137]) rebooted! !Reboot 72057594037927937 (actor [3:105:2137]) tablet resolver refreshed! new actor is[3:146:2167] Leader for TabletID 72057594037927937 is [3:146:2167] sender: [3:216:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:101:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:106:2057] recipient: [4:99:2133] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:139:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:143:2057] recipient: [4:97:2132] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:145:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:105:2137] sender: [4:147:2057] recipient: [4:146:2167] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:149:2057] recipient: [4:146:2167] !Reboot 72057594037927937 (actor [4:105:2137]) rebooted! !Reboot 72057594037927937 (actor [4:105:2137]) tablet resolver refreshed! new actor is[4:148:2168] Leader for TabletID 72057594037927937 is [4:148:2168] sender: [4:218:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:101:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:106:2057] recipient: [5:99:2133] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:139:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:148:2057] recipient: [5:97:2132] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:151:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:105:2137] sender: [5:152:2057] recipient: [5:150:2172] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:154:2057] recipient: [5:150:2172] !Reboot 72057594037927937 (actor [5:105:2137]) rebooted! !Reboot 72057594037927937 (actor [5:105:2137]) tablet resolver refreshed! new actor is[5:153:2173] Leader for TabletID 72057594037927937 is [5:153:2173] sender: [5:223:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:101:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:106:2057] recipient: [6:99:2133] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:139:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:148:2057] recipient: [6:97:2132] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:151:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:105:2137] sender: [6:152:2057] recipient: [6:150:2172] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:154:2057] recipient: [6:150:2172] !Reboot 72057594037927937 (actor [6:105:2137]) rebooted! !Reboot 72057594037927937 (actor [6:105:2137]) tablet resolver refreshed! new actor is[6:153:2173] Leader for TabletID 72057594037927937 is [6:153:2173] sender: [6:223:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:101:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:106:2057] recipient: [7:99:2133] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:139:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:151:2057] recipient: [7:97:2132] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:154:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:105:2137] sender: [7:155:2057] recipient: [7:153:2174] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:157:2057] recipient: [7:153:2174] !Reboot 72057594037927937 (actor [7:105:2137]) rebooted! !Reboot 72057594037927937 (actor [7:105:2137]) tablet resolver refreshed! new actor is[7:156:2175] Leader for TabletID 72057594037927937 is [7:156:2175] sender: [7:226:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:101:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:106:2057] recipient: [8:99:2133] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:139:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:153:2057] recipient: [8:97:2132] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:155:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:105:2137] sender: [8:157:2057] recipient: [8:156:2176] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:159:2057] recipient: [8:156:2176] !Reboot 72057594037927937 (actor [8:105:2137]) rebooted! !Reboot 72057594037927937 (actor [8:105:2137]) tablet resolver refreshed! new actor is[8:158:2177] Leader for TabletID 72057594037927937 is [8:158:2177] sender: [8:228:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:101:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:106:2057] recipient: [9:99:2133] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:139:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:105:2137]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:153:2057] recipient: [9:97:2132] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:156:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:105:2137] sender: [9:157:2057] recipient: [9:155:2176] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:159:2057] recipient: [9:155:2176] !Reboot 72057594037927937 (actor [9:105:2137]) rebooted! !Reboot 72057594037927937 (actor [9:105:2137]) tablet resolver refreshed! new actor is[9:158:2177] Leader for TabletID 72057594037927937 is [9:158:2177] sender: [9:228:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:101:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:106:2057] recipient: [10:99:2133] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:139:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:155:2057] recipient: [10:97:2132] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:157:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:105:2137] sender: [10:159:2057] recipient: [10:158:2177] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:161:2057] recipient: [10:158:2177] !Reboot 72057594037927937 (actor [10:105:2137]) rebooted! !Reboot 72057594037927937 (actor [10:105:2137]) tablet resolver refreshed! new actor is[10:160:2178] Leader for TabletID 72057594037927937 is [10:160:2178] sender: [10:230:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:101:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:106:2057] recipient: [11:99:2133] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:139:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:105:2137]) on event NKikimr::TEvKeyValue::TEvCollect ! 
Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:158:2057] recipient: [11:97:2132] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:161:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:105:2137] sender: [11:162:2057] recipient: [11:160:2180] Leader for TabletID 72057594037927937 is [11:163:2181] sender: [11:164:2057] recipient: [11:160:2180] !Reboot 72057594037927937 (actor [11:105:2137]) rebooted! !Reboot 72057594037927937 (actor [11:105:2137]) tablet resolver refreshed! new actor is[11:163:2181] Leader for TabletID 72057594037927937 is [11:163:2181] sender: [11:216:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:101:2057] recipient: [12:99:2133] Leader for TabletID 7 ... etID 72057594037927937 is [106:105:2137] sender: [106:106:2057] recipient: [106:99:2133] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:139:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:105:2137]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:234:2057] recipient: [106:97:2132] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:237:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:105:2137] sender: [106:238:2057] recipient: [106:236:2246] Leader for TabletID 72057594037927937 is [106:239:2247] sender: [106:240:2057] recipient: [106:236:2246] !Reboot 72057594037927937 (actor [106:105:2137]) rebooted! !Reboot 72057594037927937 (actor [106:105:2137]) tablet resolver refreshed! new actor is[106:239:2247] Leader for TabletID 72057594037927937 is [106:239:2247] sender: [106:292:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:101:2057] recipient: [107:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:101:2057] recipient: [107:99:2133] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:106:2057] recipient: [107:99:2133] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:139:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:239:2057] recipient: [107:97:2132] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:242:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:105:2137] sender: [107:243:2057] recipient: [107:241:2251] Leader for TabletID 72057594037927937 is [107:244:2252] sender: [107:245:2057] recipient: [107:241:2251] !Reboot 72057594037927937 (actor [107:105:2137]) rebooted! !Reboot 72057594037927937 (actor [107:105:2137]) tablet resolver refreshed! 
new actor is[107:244:2252] Leader for TabletID 72057594037927937 is [107:244:2252] sender: [107:314:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:101:2057] recipient: [108:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:101:2057] recipient: [108:99:2133] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:106:2057] recipient: [108:99:2133] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:139:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:239:2057] recipient: [108:97:2132] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:242:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:105:2137] sender: [108:243:2057] recipient: [108:241:2251] Leader for TabletID 72057594037927937 is [108:244:2252] sender: [108:245:2057] recipient: [108:241:2251] !Reboot 72057594037927937 (actor [108:105:2137]) rebooted! !Reboot 72057594037927937 (actor [108:105:2137]) tablet resolver refreshed! new actor is[108:244:2252] Leader for TabletID 72057594037927937 is [108:244:2252] sender: [108:314:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:101:2057] recipient: [109:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:101:2057] recipient: [109:99:2133] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:106:2057] recipient: [109:99:2133] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:139:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:240:2057] recipient: [109:97:2132] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:243:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:105:2137] sender: [109:244:2057] recipient: [109:242:2251] Leader for TabletID 72057594037927937 is [109:245:2252] sender: [109:246:2057] recipient: [109:242:2251] !Reboot 72057594037927937 (actor [109:105:2137]) rebooted! !Reboot 72057594037927937 (actor [109:105:2137]) tablet resolver refreshed! new actor is[109:245:2252] Leader for TabletID 72057594037927937 is [109:245:2252] sender: [109:293:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:101:2057] recipient: [110:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:101:2057] recipient: [110:99:2133] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:106:2057] recipient: [110:99:2133] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:139:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:242:2057] recipient: [110:97:2132] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:245:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:105:2137] sender: [110:246:2057] recipient: [110:244:2253] Leader for TabletID 72057594037927937 is [110:247:2254] sender: [110:248:2057] recipient: [110:244:2253] !Reboot 72057594037927937 (actor [110:105:2137]) rebooted! 
!Reboot 72057594037927937 (actor [110:105:2137]) tablet resolver refreshed! new actor is[110:247:2254] Leader for TabletID 72057594037927937 is [110:247:2254] sender: [110:317:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:101:2057] recipient: [111:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:101:2057] recipient: [111:99:2133] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:106:2057] recipient: [111:99:2133] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:139:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:105:2137]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:242:2057] recipient: [111:97:2132] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:245:2057] recipient: [111:244:2253] Leader for TabletID 72057594037927937 is [111:105:2137] sender: [111:246:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:247:2254] sender: [111:248:2057] recipient: [111:244:2253] !Reboot 72057594037927937 (actor [111:105:2137]) rebooted! !Reboot 72057594037927937 (actor [111:105:2137]) tablet resolver refreshed! new actor is[111:247:2254] Leader for TabletID 72057594037927937 is [111:247:2254] sender: [111:317:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:101:2057] recipient: [112:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:101:2057] recipient: [112:99:2133] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:106:2057] recipient: [112:99:2133] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:139:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:105:2137]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:243:2057] recipient: [112:97:2132] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:246:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:105:2137] sender: [112:247:2057] recipient: [112:245:2253] Leader for TabletID 72057594037927937 is [112:248:2254] sender: [112:249:2057] recipient: [112:245:2253] !Reboot 72057594037927937 (actor [112:105:2137]) rebooted! !Reboot 72057594037927937 (actor [112:105:2137]) tablet resolver refreshed! new actor is[112:248:2254] Leader for TabletID 72057594037927937 is [112:248:2254] sender: [112:318:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:101:2057] recipient: [113:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:101:2057] recipient: [113:99:2133] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:106:2057] recipient: [113:99:2133] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:139:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:105:2137]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:248:2057] recipient: [113:97:2132] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:250:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:105:2137] sender: [113:252:2057] recipient: [113:251:2258] Leader for TabletID 72057594037927937 is [113:253:2259] sender: [113:254:2057] recipient: [113:251:2258] !Reboot 72057594037927937 (actor [113:105:2137]) rebooted! !Reboot 72057594037927937 (actor [113:105:2137]) tablet resolver refreshed! new actor is[113:253:2259] Leader for TabletID 72057594037927937 is [113:253:2259] sender: [113:323:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:101:2057] recipient: [114:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:101:2057] recipient: [114:99:2133] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:106:2057] recipient: [114:99:2133] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:139:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:105:2137]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:248:2057] recipient: [114:97:2132] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:251:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:105:2137] sender: [114:252:2057] recipient: [114:250:2258] Leader for TabletID 72057594037927937 is [114:253:2259] sender: [114:254:2057] recipient: [114:250:2258] !Reboot 72057594037927937 (actor [114:105:2137]) rebooted! !Reboot 72057594037927937 (actor [114:105:2137]) tablet resolver refreshed! new actor is[114:253:2259] Leader for TabletID 72057594037927937 is [114:253:2259] sender: [114:323:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:101:2057] recipient: [115:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:101:2057] recipient: [115:99:2133] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:106:2057] recipient: [115:99:2133] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:139:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:105:2137]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:249:2057] recipient: [115:97:2132] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:252:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:105:2137] sender: [115:253:2057] recipient: [115:251:2258] Leader for TabletID 72057594037927937 is [115:254:2259] sender: [115:255:2057] recipient: [115:251:2258] !Reboot 72057594037927937 (actor [115:105:2137]) rebooted! !Reboot 72057594037927937 (actor [115:105:2137]) tablet resolver refreshed! 
new actor is[115:254:2259] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:101:2057] recipient: [116:99:2133] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:101:2057] recipient: [116:99:2133] Leader for TabletID 72057594037927937 is [116:105:2137] sender: [116:106:2057] recipient: [116:99:2133] Leader for TabletID 72057594037927937 is [116:105:2137] sender: [116:139:2057] recipient: [116:14:2061] >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.763076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.763838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.763905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.763978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.764046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.764076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.764142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.884390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.884441Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.919923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.923845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.924007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.962222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.963538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.968355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.982076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.997270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.997410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.997476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.997582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.020760Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.145851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.146106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.146352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.146601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.146651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.156480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.156687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.156942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.157037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.157087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.157131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.163095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.163164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 
2024-11-21T23:13:34.163209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.165547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.165642Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.165740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.165790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.171601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.176453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.176666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.177696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.177860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.177918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.178282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.178351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.178560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.178649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.181134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.181203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.181415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.181465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.181911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.181966Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.182071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.182131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.182188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.182255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.182304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:34.182356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.182439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.182489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.182525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.184397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.184502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.184543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.184581Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.184619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.184714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
erTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:33.331277Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:14:33.331864Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 656us result status StatusSuccess 2024-11-21T23:14:33.333290Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:33.364009Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at 
schemeshard: 72057594046678944 2024-11-21T23:14:33.364536Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 587us result status StatusSuccess 2024-11-21T23:14:33.365919Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 3 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 
IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1472 DataSize: 1472 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_topic/ut/unittest >> BasicUsage::TWriteSession_WriteEncoded [GOOD] Test command err: 2024-11-21T23:10:35.283905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873928588917944:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:35.283953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002be0/r3tmp/tmpCz4gtV/pdisk_1.dat 2024-11-21T23:10:35.606503Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:10:35.818557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:10:35.818696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:10:35.820305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:10:35.841594Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24240, node 1 2024-11-21T23:10:35.953793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002be0/r3tmp/yandexcoQUHw.tmp 2024-11-21T23:10:35.953823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002be0/r3tmp/yandexcoQUHw.tmp 2024-11-21T23:10:35.954029Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002be0/r3tmp/yandexcoQUHw.tmp 2024-11-21T23:10:35.954168Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:10:36.010615Z INFO: TTestServer started on Port 32138 GrpcPort 24240 TClient is connected to server localhost:32138 PQClient connected to localhost:24240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:10:36.466215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:36.485035Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:10:36.504985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:10:36.512078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:10:36.666032Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:10:38.757137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873941473820404:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:38.757153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439873941473820383:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:38.757335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:10:38.761372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:10:38.773454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439873941473820421:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:10:38.966440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:10:38.979198Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873941473820500:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:10:38.980885Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQ3MmI4MTgtMWRjMzBmZmEtZDQ1ZTM0NjEtMjdlOGI5NWE=, ActorId: [1:7439873941473820380:2306], ActorState: ExecuteState, TraceId: 01jd8fvv4nf63hsd8927sdpfg0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:10:38.982978Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:10:39.039919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:10:39.115392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439873945768788071:2622] 2024-11-21T23:10:40.284892Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873928588917944:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:10:40.284972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:10:45.492897Z :ConnectToYDB INFO: TTopicSdkTestSetup started 2024-11-21T23:10:45.508645Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:10:45.526238Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7439873971538592179:2815] connected; active server actors: 1 2024-11-21T23:10:45.529232Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2024-11-21T23:10:45.530027Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2024-11-21T23:10:45.530155Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2024-11-21T23:10:45.530721Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2024-11-21T23:10:45.539566Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2024-11-21T23:10:45.539850Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2024-11-21T23:10:45.540422Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2024-11-21T23:10:45.540625Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2024-11-21T23:10:45.540652Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2024-11-21T23:10:45.540668Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2024-11-21T23:10:45.540683Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2024-11-21T23:10:45.540717Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:10:45.540751Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] empty tx queue 2024-11-21T23:10:45.540775Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2024-11-21T23:10:45.540974Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2024-11-21T23:10:45.541006Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:45.541069Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873971538592211:2437], now have 1 active actors on pipe 2024-11-21T23:10:45.569162Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:10:45.569200Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7439873971538592178:2814], now have 1 active actors on pipe 2024-11-21T23:10:45.569251Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvUpdateConfig 2024-11-21T23:10:45.599372Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Config update version 0(current 0) received from actor [1:7439873928588918136:2193] txId 281474976710673 config: PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: ... 
rom client: partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2024-11-21T23:14:29.457916Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 Start reading TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2024-11-21T23:14:29.457964Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2024-11-21T23:14:29.458058Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 partition ready for read: partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1732230864227, sizeLag# 519 2024-11-21T23:14:29.458087Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1TEvPartitionReady. Aval parts: 1 2024-11-21T23:14:29.458163Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 performing read request: guid# 71f6ce4a-35e3e541-636b66d5-d58d9ea0, from# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), count# 4, size# 622, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T23:14:29.458298Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 READ FROM TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1)maxCount 4 maxSize 622 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 71f6ce4a-35e3e541-636b66d5-d58d9ea0 2024-11-21T23:14:29.462748Z node 12 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2024-11-21T23:14:29.462805Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2024-11-21T23:14:29.463000Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 1 Topic 'test-topic' partition 0 user test-consumer offset 0 count 4 size 622 endOffset 4 max time lag 0ms effective offset 0 2024-11-21T23:14:29.463054Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T23:14:29.463518Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2024-11-21T23:14:29.463552Z node 12 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T23:14:29.463756Z node 12 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:14:29.463828Z node 12 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T23:14:29.464369Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 98 bytes ..." 
SourceId: "" SeqNo: 1 WriteTimestampMS: 1732230864227 CreateTimestampMS: 1732230864194 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 91 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1732230864227 CreateTimestampMS: 1732230864194 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 98 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1732230864227 CreateTimestampMS: 1732230864194 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 71 bytes ..." SourceId: "" SeqNo: 4 WriteTimestampMS: 1732230864247 CreateTimestampMS: 1732230864194 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 29 RealReadOffset: 3 WaitQuotaTimeMs: 0 } Cookie: 0 } 2024-11-21T23:14:29.464638Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2024-11-21T23:14:29.464694Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 after read state TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 71f6ce4a-35e3e541-636b66d5-d58d9ea0 has messages 1 2024-11-21T23:14:29.464815Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 read done: guid# 71f6ce4a-35e3e541-636b66d5-d58d9ea0, partition# TopicId: Topic /Root/test-topic in database: Root, partition 0(assignId:1), size# 387 2024-11-21T23:14:29.464860Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 response to read: guid# 71f6ce4a-35e3e541-636b66d5-d58d9ea0 2024-11-21T23:14:29.465140Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 Process answer. Aval parts: 0 2024-11-21T23:14:29.469542Z :DEBUG: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] [] Got ReadResponse, serverBytesSize = 387, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428413 2024-11-21T23:14:29.469787Z :DEBUG: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428413 2024-11-21T23:14:29.470424Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2024-11-21T23:14:29.470503Z :DEBUG: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] [] Returning serverBytesSize = 387 to budget 2024-11-21T23:14:29.470548Z :DEBUG: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] [] In ContinueReadingDataImpl, ReadSizeBudget = 387, ReadSizeServerDelta = 52428413 2024-11-21T23:14:29.470813Z :DEBUG: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T23:14:29.471096Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2024-11-21T23:14:29.471188Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 1} (1-1) 2024-11-21T23:14:29.471217Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 2} (2-2) 2024-11-21T23:14:29.471247Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (3-3) 2024-11-21T23:14:29.471384Z :DEBUG: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] [] The application data is transferred to the client. 
Number of messages 4, size 14 bytes 2024-11-21T23:14:29.471456Z :DEBUG: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:14:29.471750Z :INFO: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] Closing read session. Close timeout: 0.000000s 2024-11-21T23:14:29.471810Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:0 2024-11-21T23:14:29.471882Z :INFO: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] Counters: { Errors: 0 CurrentSessionLifetimeMs: 132 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:14:29.472053Z :NOTICE: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:14:29.472120Z :DEBUG: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] [] Abort session to cluster 2024-11-21T23:14:29.472931Z :NOTICE: [/Root] [/Root] [86561e7f-d9b803c-39cc2c35-2aaf26b1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:14:29.473269Z :INFO: [/Root] SessionId [7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2024-11-21T23:14:29.473313Z :INFO: [/Root] SessionId [7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0] PartitionId [0] Generation [1] Write session will now close 2024-11-21T23:14:29.473392Z :DEBUG: [/Root] SessionId [7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0] PartitionId [0] Generation [1] Write session: aborting 2024-11-21T23:14:29.473922Z :INFO: [/Root] SessionId [7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2024-11-21T23:14:29.473971Z :DEBUG: [/Root] SessionId [7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0] PartitionId [0] Generation [1] Write session: destroy 2024-11-21T23:14:29.521340Z node 12 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 grpc read done: success# 1, data# { read_request { bytes_size: 387 } } 2024-11-21T23:14:29.521413Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 grpc closed 2024-11-21T23:14:29.521460Z node 12 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_12_1_3373168249169576971_v1 is DEAD 2024-11-21T23:14:29.523102Z node 12 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0 grpc read done: success: 0 data: 2024-11-21T23:14:29.523134Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0 grpc read failed 2024-11-21T23:14:29.523173Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0 grpc closed 2024-11-21T23:14:29.523202Z node 12 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 7b633add-c3d5d8f7-76501705-a63f8a1a|f7762daa-32942e8c-3e24423b-1bf8b2a4_0 is DEAD 2024-11-21T23:14:29.524533Z node 12 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:14:29.539416Z node 12 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [12:7439874933664499676:2506] disconnected; active server actors: 1 2024-11-21T23:14:29.539469Z node 12 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [12:7439874933664499676:2506] client test-consumer disconnected session test-consumer_12_1_3373168249169576971_v1 2024-11-21T23:14:29.539662Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:14:29.539721Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [12:7439874907894695728:2459] destroyed 2024-11-21T23:14:29.539828Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T23:14:29.540046Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:14:29.540078Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_12_1_3373168249169576971_v1 2024-11-21T23:14:29.540108Z node 12 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [12:7439874933664499679:2509] destroyed 2024-11-21T23:14:29.540170Z node 12 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_12_1_3373168249169576971_v1 >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.765569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.765754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.765807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.765854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.765898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.765939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.765995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.913328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.913398Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.925715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.929781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.929929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.960158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.963887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.968355Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.982875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.997272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.997378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.997462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.997596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.024076Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.185036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.185295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.185470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.185648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.185702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.188293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.188453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.188655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.188701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.188735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.188762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.190886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.190962Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 
ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.191023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.192746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.192818Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.192861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.192931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.196853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.198782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.198961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.200051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.200236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.200295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.200567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.200638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.200792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.200864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.202894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.202937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.203107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.203151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2024-11-21T23:13:34.203442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.203494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.203600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.203665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.203714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.203755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.203794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:34.203847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.203901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.203944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.203982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.205743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.205842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.205959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.205996Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.206038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.206127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 321 } } 2024-11-21T23:14:37.518512Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:14:37.518802Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 61500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 321 } } 2024-11-21T23:14:37.518989Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId#102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 61500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 321 } } 2024-11-21T23:14:37.519052Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2024-11-21T23:14:37.519239Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2024-11-21T23:14:37.519303Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2024-11-21T23:14:37.522880Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:37.523200Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:37.523315Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:14:37.523463Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T23:14:37.523749Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:14:37.526611Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:14:37.526867Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2024-11-21T23:14:37.528646Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:37.528880Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 51539609707 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:37.528991Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId#102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:14:37.529457Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2024-11-21T23:14:37.529688Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2024-11-21T23:14:37.542465Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:37.542569Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:14:37.543072Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:37.543175Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:14:37.544168Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:37.544270Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2024-11-21T23:14:37.545469Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:37.545637Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:37.545704Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:14:37.545774Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2024-11-21T23:14:37.545854Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:14:37.546000Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T23:14:37.547464Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2304 } } 2024-11-21T23:14:37.547538Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:14:37.547696Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2304 } } 2024-11-21T23:14:37.547862Z node 12 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2304 } } 2024-11-21T23:14:37.549237Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:37.549314Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:14:37.549564Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:37.549660Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:14:37.549821Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:37.549947Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:37.550029Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:37.550112Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:14:37.550206Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:14:37.554290Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:14:37.556453Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:37.556701Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:37.556887Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:37.556942Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:14:37.557152Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:14:37.557224Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:37.557311Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:14:37.557471Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:340:2315] message: TxId: 102 2024-11-21T23:14:37.557609Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:37.557706Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:14:37.557802Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:14:37.558045Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:14:37.561487Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:14:37.561582Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:389:2363] TestWaitNotification: OK eventTxId 102 >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TSchemeShardTest::FindSubDomainPathId >> TReplicationTests::CannotSetAsyncReplicaAttribute >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser |84.7%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |84.7%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |84.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> TConsoleTests::TestCreateTenant >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate |84.7%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |84.7%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> TConsoleConfigTests::TestModifyConfigItem |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> 
LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpDocumentApi::RestrictDrop >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> KqpQueryService::MaterializeTxResults [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:14:32.281962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:14:32.282053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:14:32.282091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:14:32.282116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:14:32.282154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:14:32.282171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:14:32.282236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:14:32.282569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:14:32.350341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2024-11-21T23:14:32.350429Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:32.375350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:14:32.375648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:14:32.375865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:14:32.391897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:14:32.392818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:14:32.393490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:32.393773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:14:32.400386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:32.401859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:32.401919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:32.402104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:14:32.402146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:32.402180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:14:32.402292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:14:32.409069Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:14:32.523123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:14:32.523418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:32.523687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:14:32.523987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:14:32.524044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:32.526616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:32.526752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2024-11-21T23:14:32.526977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:32.527061Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:14:32.527102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:14:32.527141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:14:32.529323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:32.529372Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:14:32.529401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:14:32.531225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:32.531281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:32.531372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:14:32.531450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:14:32.535070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:14:32.537263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:14:32.537455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:14:32.538245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:32.538430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:32.538472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:14:32.538749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:14:32.538803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:14:32.538935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:14:32.539012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:14:32.541222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:32.541270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:32.541452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:32.541504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:14:32.541866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:32.541927Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:14:32.542042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:14:32.542075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:14:32.542117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:14:32.542184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:14:32.542233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:14:32.542260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:14:32.542317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:14:32.542346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:14:32.542370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:14:32.544055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:14:32.544178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:14:32.544218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:14:32.544254Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:14:32.544301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:14:32.544405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
untTxs#1 2024-11-21T23:14:44.387460Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Ack mediator stepId#5000003 2024-11-21T23:14:44.387516Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2024-11-21T23:14:44.387752Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [8:123:2149], Recipient [8:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T23:14:44.387792Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:14:44.387893Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:44.387927Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:44.388075Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:14:44.388246Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:44.388283Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T23:14:44.388319Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:14:44.388800Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:44.388849Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:14:44.388986Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:14:44.389026Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:14:44.389074Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:44.389141Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T23:14:44.389201Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:44.389257Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:14:44.389300Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:14:44.389482Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:14:44.389541Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T23:14:44.389592Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T23:14:44.389639Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T23:14:44.390614Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [8:201:2204], Recipient [8:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 
2024-11-21T23:14:44.390661Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T23:14:44.390759Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:44.390850Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:44.390897Z node 8 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:14:44.390947Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T23:14:44.390997Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:14:44.391092Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:14:44.392480Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [8:201:2204], Recipient [8:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2024-11-21T23:14:44.392523Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2024-11-21T23:14:44.392593Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:44.392671Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:44.392701Z node 8 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:14:44.392733Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T23:14:44.392768Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:14:44.392861Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T23:14:44.392918Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:14:44.393205Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [8:123:2149], Recipient [8:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2024-11-21T23:14:44.393246Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2024-11-21T23:14:44.393304Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:14:44.393354Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:14:44.393436Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:14:44.399008Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:14:44.400438Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:14:44.400494Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:14:44.402166Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:14:44.402204Z node 8 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:14:44.402300Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:14:44.402574Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:14:44.402629Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:14:44.403106Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [8:439:2396], Recipient [8:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:14:44.403183Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:14:44.403234Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:14:44.403431Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [8:355:2336], Recipient [8:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2024-11-21T23:14:44.403464Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2024-11-21T23:14:44.403547Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:14:44.403653Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:14:44.403702Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:437:2394] 2024-11-21T23:14:44.403920Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [8:439:2396], Recipient [8:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:14:44.403959Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:14:44.404001Z node 8 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2024-11-21T23:14:44.404479Z node 8 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [8:440:2397], Recipient [8:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T23:14:44.404542Z node 8 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T23:14:44.404656Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:44.404865Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 223us result status StatusPathDoesNotExist 2024-11-21T23:14:44.405033Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> KikimrIcGateway::TestLoadTableMetadata >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> TReplicationTests::CopyReplicatedTable [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:14:33.362018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:14:33.362109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:14:33.362164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:14:33.362199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:14:33.362244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:14:33.362274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:14:33.362336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:14:33.362703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:14:33.452997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:33.453061Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:33.474271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:14:33.481494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:14:33.481780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:14:33.491896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:14:33.494024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:14:33.494820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:33.495227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:14:33.501843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:33.503618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:33.503708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:33.503925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:14:33.503992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:33.504033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:14:33.504200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:14:33.520294Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:14:34.303453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:14:34.303771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:34.304219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:14:34.304473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:14:34.304524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:34.328221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:34.328365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:14:34.328597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:34.328695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:14:34.328746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:14:34.328780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:14:34.336535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:34.337119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:14:34.337158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:14:34.363783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:34.363863Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:34.363922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:14:34.363989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:14:34.419526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:14:34.435428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:14:34.435791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:14:34.437001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 
72057594046678944 2024-11-21T23:14:34.437263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:34.437318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:14:34.437692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:14:34.437747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:14:34.438030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:14:34.438186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:14:34.464010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:34.464080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:34.464293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:34.464363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:14:34.464788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:34.464851Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:14:34.464964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:14:34.464999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:14:34.465044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:14:34.465129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:14:34.465176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:14:34.465208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:14:34.465296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:14:34.465343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:14:34.465378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:14:34.473624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:14:34.473825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:14:34.473866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:14:34.473904Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:14:34.473946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:14:34.474087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... hId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:45.652468Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:14:45.652499Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:14:45.652520Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:14:45.652577Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2024-11-21T23:14:45.664356Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:14:45.664522Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:14:45.665775Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1658 } } 2024-11-21T23:14:45.665822Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:14:45.665965Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1658 } } 2024-11-21T23:14:45.666073Z node 8 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1658 } } 2024-11-21T23:14:45.666881Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:45.666950Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2024-11-21T23:14:45.667114Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 
72057594046678944, message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:45.667182Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:14:45.667297Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 402 RawX2: 34359740741 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:45.667373Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:45.667426Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2024-11-21T23:14:45.670786Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:45.671383Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:45.695591Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:45.695667Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2024-11-21T23:14:45.695824Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:45.695873Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:14:45.695945Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 34359740660 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2024-11-21T23:14:45.696014Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:45.696059Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:45.696111Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:14:45.696159Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2024-11-21T23:14:45.696189Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2024-11-21T23:14:45.698303Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, 
operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:45.698740Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:45.698819Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2024-11-21T23:14:45.698889Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T23:14:45.698936Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2024-11-21T23:14:45.699086Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2024-11-21T23:14:45.699130Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2024-11-21T23:14:45.701279Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:45.701337Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:14:45.701480Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:14:45.701521Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:45.701574Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2024-11-21T23:14:45.701674Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:338:2313] message: TxId: 102 2024-11-21T23:14:45.701733Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:45.701787Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:14:45.701827Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:14:45.702000Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:14:45.702044Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:14:45.703947Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:14:45.704009Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:428:2392] TestWaitNotification: OK eventTxId 102 2024-11-21T23:14:45.704873Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:45.705142Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 310us result status StatusSuccess 2024-11-21T23:14:45.705576Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true 
CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] >> KqpQueryService::TableSink_Oltp_Replace-UseSink >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |84.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |84.8%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |84.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrIcGateway::TestCreateExternalTable >> TConsoleTests::TestCreateTenant [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> TTicketParserTest::BulkAuthorizationRetryError >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> TConsoleTests::TestCreateTenantExtSubdomain >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> TConfigsDispatcherTests::TestYamlEndToEnd >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply >> TConsoleConfigTests::TestGetItems >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KikimrIcGateway::TestDropTable [GOOD] >> TTicketParserTest::AuthenticationWithUserAccount >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> KikimrIcGateway::TestDropResourcePool >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> 
TConsoleConfigHelpersTests::TestConfigSubscriber >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleTests::TestListTenants >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> TConsoleConfigTests::TestAutoOrder >> KikimrIcGateway::TestCreateExternalTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.763040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.764573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.764642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.764723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.764770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.764807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.764871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.880862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.880942Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.917777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.922177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.924048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.959760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.963571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.968314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.992078Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.993428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.993493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.999655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.999723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.999792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.999882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.020872Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.148985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.149268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.149517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.149754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.149816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.152612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.152807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.153047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.153113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.153160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.153203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.155867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.155946Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.155987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.158273Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.158337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.159078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.159177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.164195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.166535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.166808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.169442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.169591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.169668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.171636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.171743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.171931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.172025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.175636Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.175691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.175882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.175926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.176824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.176873Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.176978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.177026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.177074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.177113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.177165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:13:34.177226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.177302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.177366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.177401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.180957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.181086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.181125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.181172Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.181243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.181358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:14:47.376269Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:14:47.376445Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T23:14:47.376940Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:47.377179Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 64424511593 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:47.377306Z node 15 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#102:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2024-11-21T23:14:47.377669Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T23:14:47.378088Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:14:47.378210Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:14:47.381478Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:47.381529Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:14:47.381727Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:14:47.381989Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:47.382036Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:14:47.382081Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:201:2204], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T23:14:47.382753Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:14:47.382816Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:14:47.383044Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:14:47.383107Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:47.383198Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T23:14:47.383273Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:14:47.383353Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:14:47.383412Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:14:47.383715Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:14:47.383802Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T23:14:47.383864Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T23:14:47.383923Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T23:14:47.384895Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:47.384995Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:47.385046Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:14:47.385124Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T23:14:47.385186Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:14:47.388327Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:47.388440Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:14:47.388478Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:14:47.388512Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T23:14:47.388548Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:14:47.388659Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T23:14:47.402121Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:14:47.403992Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:14:47.404366Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 
2024-11-21T23:14:47.404450Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:14:47.411198Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:14:47.411402Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:14:47.411509Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:455:2414] TestWaitNotification: OK eventTxId 102 2024-11-21T23:14:47.412395Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:14:47.412810Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 456us result status StatusSuccess 2024-11-21T23:14:47.413673Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:47.418055Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:14:47.418442Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 436us result status StatusSuccess 2024-11-21T23:14:47.419132Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: 
"" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:47.516115Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 >> KikimrIcGateway::TestCreateResourcePool >> TSequence::CreateSequence >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> KqpDocumentApi::RestrictDrop [GOOD] >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TTicketParserTest::AuthorizationRetryError >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TSequence::CreateSequence [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TInterconnectTest::TestSimplePingPong >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> KikimrIcGateway::TestDropResourcePool [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> TConsoleConfigTests::TestAutoKind >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> TConsoleTests::TestListTenants [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KqpQueryService::TableSink_Oltp_Replace-UseSink [FAIL] >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> 
TConsoleTests::TestCreateSharedTenant >> TSequence::CreateDropRecreate >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TInterconnectTest::TestSimplePingPong [GOOD] >> TTicketParserTest::AuthenticationUnsupported >> TInterconnectTest::TestSubscribeByFlag >> TSequence::CreateDropRecreate [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> TConsoleConfigTests::TestAutoSplit >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TInterconnectTest::TestReconnect >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> TConsoleTests::TestCreateServerlessTenant >> KikimrIcGateway::TestSecretsExistingValidation >> TConsoleTests::TestTenantConfigConsistency >> KikimrIcGateway::TestALterResourcePool >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TTicketParserTest::AuthenticationUnknown >> TConsoleTests::TestModifyUsedZoneKind >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> KikimrIcGateway::TestALterResourcePool [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> TTicketParserTest::AuthenticationUnknown [GOOD] >> 
TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> TConsoleTests::TestSetConfig >> TTicketParserTest::Authorization >> KqpQueryService::DdlSecret [GOOD] >> TConsoleTests::TestMergeConfig >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TInterconnectTest::TestReconnect [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] >> TSequence::CopyTableWithSequence >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] >> TTicketParserTest::BulkAuthorization |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2024-11-21T23:14:20.770851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874893101772045:2252];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:20.771128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002532/r3tmp/tmpz2CunR/pdisk_1.dat 2024-11-21T23:14:21.185315Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:21.187105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:21.187216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:21.190325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23680, node 1 2024-11-21T23:14:21.295355Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:21.295376Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:21.295385Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:21.295537Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:14:21.458502Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T23:14:21.464214Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:32282, port: 32282 2024-11-21T23:14:21.464308Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2024-11-21T23:14:21.470338Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2024-11-21T23:14:21.470869Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****F-Ug (F1AEF328) () has now retryable error message 'Could not login via LDAP' 2024-11-21T23:14:24.390509Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874912297686164:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:24.390650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002532/r3tmp/tmpfBgaxV/pdisk_1.dat 2024-11-21T23:14:24.514636Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:24.536024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:24.536110Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:24.537814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12484, node 2 2024-11-21T23:14:24.671258Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:24.671283Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:24.671289Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:24.671380Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:14:24.959634Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T23:14:24.964978Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****wOnw (435EA3EF) () has now permanent error message 'Could not login via LDAP' 2024-11-21T23:14:28.674898Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439874928515432364:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002532/r3tmp/tmpR5lFOY/pdisk_1.dat 2024-11-21T23:14:28.767319Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:14:28.920438Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:28.952074Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:28.962713Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:28.967609Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24058, node 3 2024-11-21T23:14:29.123349Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2024-11-21T23:14:29.123372Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:29.123381Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:29.123664Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:14:29.430546Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T23:14:29.447531Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****sdZA (F25A07DA) () has now permanent error message 'Could not login via LDAP' 2024-11-21T23:14:39.579780Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439874974803682435:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002532/r3tmp/tmp9cAtD8/pdisk_1.dat 2024-11-21T23:14:39.769982Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:14:40.110014Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:40.119650Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:40.119730Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:40.136355Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20207, node 4 2024-11-21T23:14:40.353556Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:40.353582Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:40.353590Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:40.353710Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:14:40.634002Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T23:14:40.649219Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****cJ_g (56296BF0) () has now permanent error message 'Could not login via LDAP' 2024-11-21T23:14:44.611171Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439874997756164579:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:44.611215Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002532/r3tmp/tmpIoyI12/pdisk_1.dat 2024-11-21T23:14:45.025695Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:45.047786Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:45.047878Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:45.051007Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16220, node 5 2024-11-21T23:14:45.135079Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:45.135112Z node 5 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2024-11-21T23:14:45.135124Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:45.135254Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:14:45.488994Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T23:14:45.492423Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****Cocg (5F327CEB) () has now permanent error message 'Could not login via LDAP' 2024-11-21T23:14:50.598003Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439875024075782685:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002532/r3tmp/tmpbgRlkr/pdisk_1.dat 2024-11-21T23:14:50.709425Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:14:50.791145Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:50.794771Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:50.794860Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:50.797639Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28641, node 6 2024-11-21T23:14:51.011008Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:51.011039Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:51.011049Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:51.011176Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:14:51.218508Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2024-11-21T23:14:51.219440Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:6906, port: 6906 2024-11-21T23:14:51.219570Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T23:14:51.414675Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2024-11-21T23:14:51.468152Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Ddfg (182D628D) () has now valid token of ldapuser@ldap >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> TTicketParserTest::Authorization [GOOD] >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> 
TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TSequence::CopyTableWithSequence [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> TTicketParserTest::BulkAuthorization [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestRemoveTenant >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 >> TTicketParserTest::AuthorizationModify >> TSequence::AlterSequence >> TConsoleTests::TestTenantGenerationExtSubdomain >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> KqpQueryService::DdlTx >> TTicketParserTest::AuthorizationWithRequiredPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2024-11-21T23:14:41.434796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:41.434871Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:41.536182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:43.237939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:43.238011Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:43.330555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:44.784478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:44.784561Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:44.834435Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:46.335000Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:46.335068Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:46.416286Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:48.247582Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:48.247652Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:48.302086Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:49.775550Z 
node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:49.775615Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:49.823447Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:51.046411Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:51.046487Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:51.118164Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:52.776796Z node 8 :CMS_CONFIGS TRACE: StateWork, received event# 269877760, Sender [8:314:2283], Recipient [8:313:2280]: NKikimr::TEvTabletPipe::TEvClientConnected 2024-11-21T23:14:52.776891Z node 8 :CMS_CONFIGS TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2024-11-21T23:14:52.823615Z node 8 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [8:313:2280], Recipient [8:353:2294]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 8 Host: "ghrun-uc25mmfz34.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2024-11-21T23:14:52.823968Z node 8 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [8:313:2280], Recipient [8:360:2306]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 8 Host: "ghrun-uc25mmfz34.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2024-11-21T23:14:52.824036Z node 8 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionRequest 2024-11-21T23:14:52.824176Z node 8 :CMS_CONFIGS DEBUG: TConfigsProvider registered new subscription [8:313:2280]:1 2024-11-21T23:14:52.824292Z node 8 :CMS_CONFIGS TRACE: TConfigsProvider: check if update is required for volatile subscription [8:313:2280]:1 2024-11-21T23:14:52.824373Z node 8 :CMS_CONFIGS TRACE: TConfigsProvider: new config found for subscription [8:313:2280]:1 version= 2024-11-21T23:14:52.824531Z node 8 :CMS_CONFIGS TRACE: TSubscriptionClientSender([8:313:2280]) send TEvConfigSubscriptionResponse 2024-11-21T23:14:52.825626Z node 8 :CMS_CONFIGS TRACE: StateWork, received event# 273286169, Sender [8:406:2306], Recipient [8:313:2280]: 
NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionResponse { Generation: 1 Status { Code: SUCCESS } } 2024-11-21T23:14:52.825684Z node 8 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionResponse 2024-11-21T23:14:52.825940Z node 8 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [8:360:2306], Recipient [8:406:2306]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { } YamlConfigNotChanged: true } 2024-11-21T23:14:52.825999Z node 8 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T23:14:52.826087Z node 8 :CMS_CONFIGS TRACE: TSubscriptionClientSender([8:313:2280]) send TEvConfigSubscriptionNotificationRequest: Order: 1 Generation: 1 Config { } YamlConfigNotChanged: true 2024-11-21T23:14:52.826827Z node 8 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [8:406:2306], Recipient [8:313:2280]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Order: 1 Generation: 1 Config { } YamlConfigNotChanged: true } 2024-11-21T23:14:52.826981Z node 8 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2024-11-21T23:14:52.834412Z node 8 :CMS_CONFIGS TRACE: StateWork, received event# 273285129, Sender [8:313:2280], Recipient [8:353:2294]: NKikimr::NConsole::TEvConsole::TEvGetNodeConfigRequest { Node { NodeId: 8 Host: "ghrun-uc25mmfz34.auto.internal" Tenant: "" NodeType: "" } ItemKinds: 29 ItemKinds: 1 ItemKinds: 2 ItemKinds: 32 ItemKinds: 3 ItemKinds: 33 ItemKinds: 34 ItemKinds: 6 ItemKinds: 36 ItemKinds: 37 ItemKinds: 8 ItemKinds: 38 ItemKinds: 10 ItemKinds: 39 ItemKinds: 43 ItemKinds: 73 ItemKinds: 75 ItemKinds: 46 ItemKinds: 77 ItemKinds: 80 ItemKinds: 81 ItemKinds: 52 ItemKinds: 54 ItemKinds: 25 ItemKinds: 55 ItemKinds: 26 } 2024-11-21T23:14:52.834659Z node 8 :CMS_CONFIGS TRACE: StateWork, received event# 273285129, Sender [8:313:2280], Recipient [8:360:2306]: NKikimr::NConsole::TEvConsole::TEvGetNodeConfigRequest { Node { NodeId: 8 Host: "ghrun-uc25mmfz34.auto.internal" Tenant: "" NodeType: "" } ItemKinds: 29 ItemKinds: 1 ItemKinds: 2 ItemKinds: 32 ItemKinds: 3 ItemKinds: 33 ItemKinds: 34 ItemKinds: 6 ItemKinds: 36 ItemKinds: 37 ItemKinds: 8 ItemKinds: 38 ItemKinds: 10 ItemKinds: 39 ItemKinds: 43 ItemKinds: 73 ItemKinds: 75 ItemKinds: 46 ItemKinds: 77 ItemKinds: 80 ItemKinds: 81 ItemKinds: 52 ItemKinds: 54 ItemKinds: 25 ItemKinds: 55 ItemKinds: 26 } 2024-11-21T23:14:52.834707Z node 8 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvGetNodeConfigRequest 2024-11-21T23:14:52.834870Z node 8 :CMS_CONFIGS TRACE: Send TEvGetNodeConfigResponse: Status { Code: SUCCESS } Config { } 2024-11-21T23:14:52.834985Z node 8 :CMS_CONFIGS INFO: TLogSettingsConfigurator: got new config: 2024-11-21T23:14:52.835081Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GLOBAL has been changed from WARN to NOTICE 2024-11-21T23:14:52.835150Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GLOBAL has been changed from WARN to DEBUG 2024-11-21T23:14:52.835196Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT has been changed from WARN to NOTICE 2024-11-21T23:14:52.835224Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT has been changed from WARN to DEBUG 2024-11-21T23:14:52.835249Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: 
Priority for the component TEST has been changed from WARN to NOTICE 2024-11-21T23:14:52.835275Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TEST has been changed from WARN to DEBUG 2024-11-21T23:14:52.835301Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component PROTOCOLS has been changed from WARN to NOTICE 2024-11-21T23:14:52.835328Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component PROTOCOLS has been changed from WARN to DEBUG 2024-11-21T23:14:52.835356Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to NOTICE 2024-11-21T23:14:52.835382Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to DEBUG 2024-11-21T23:14:52.835405Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_STATUS has been changed from WARN to NOTICE 2024-11-21T23:14:52.835429Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_STATUS has been changed from WARN to DEBUG 2024-11-21T23:14:52.835456Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_NETWORK has been changed from WARN to NOTICE 2024-11-21T23:14:52.835501Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_NETWORK has been changed from WARN to DEBUG 2024-11-21T23:14:52.835526Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SESSION has been changed from WARN to NOTICE 2024-11-21T23:14:52.835555Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SESSION has been changed from WARN to DEBUG 2024-11-21T23:14:52.835594Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component HTTP has been changed from WARN to NOTICE 2024-11-21T23:14:52.835622Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component HTTP has been changed from WARN to DEBUG 2024-11-21T23:14:52.835648Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LOGGER has been changed from WARN to NOTICE 2024-11-21T23:14:52.835675Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LOGGER has been changed from WARN to DEBUG 2024-11-21T23:14:52.835707Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOBSTORAGE has been changed from WARN to NOTICE 2024-11-21T23:14:52.835735Z node 8 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BLOBSTORAGE has been changed from WARN to DEBUG 2024-11-21T23:14:52.835765 ... 
2024-11-21T23:14:56.177681Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOB_DEPOT_EVENTS has been changed from NOTICE to ALERT 2024-11-21T23:14:56.177709Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BLOB_DEPOT_EVENTS has been changed from DEBUG to ALERT 2024-11-21T23:14:56.177735Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BLOB_DEPOT_EVENTS has been changed from 0 to 10 2024-11-21T23:14:56.177767Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DS_LOAD_TEST has been changed from NOTICE to ALERT 2024-11-21T23:14:56.177796Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DS_LOAD_TEST has been changed from DEBUG to ALERT 2024-11-21T23:14:56.177822Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DS_LOAD_TEST has been changed from 0 to 10 2024-11-21T23:14:56.177849Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_PROVIDER has been changed from NOTICE to ALERT 2024-11-21T23:14:56.177875Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_PROVIDER has been changed from DEBUG to ALERT 2024-11-21T23:14:56.177900Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_PROVIDER has been changed from 0 to 10 2024-11-21T23:14:56.177939Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_INITIALIZER has been changed from NOTICE to ALERT 2024-11-21T23:14:56.177972Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_INITIALIZER has been changed from DEBUG to ALERT 2024-11-21T23:14:56.177998Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_INITIALIZER has been changed from 0 to 10 2024-11-21T23:14:56.178024Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178051Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178074Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2024-11-21T23:14:56.178093Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178113Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178129Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2024-11-21T23:14:56.178147Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178166Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178186Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2024-11-21T23:14:56.178204Z node 11 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178223Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178240Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been changed from 0 to 10 2024-11-21T23:14:56.178259Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178290Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178309Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2024-11-21T23:14:56.178327Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178343Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178360Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2024-11-21T23:14:56.178380Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178428Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178449Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2024-11-21T23:14:56.178479Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_CONVEYOR has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178511Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178542Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2024-11-21T23:14:56.178568Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178600Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178625Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2024-11-21T23:14:56.178652Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178680Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178746Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2024-11-21T23:14:56.178804Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178833Z node 11 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2024-11-21T23:14:56.178861Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2024-11-21T23:14:56.178946Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2024-11-21T23:14:56.178986Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179016Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2024-11-21T23:14:56.179046Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to ALERT 2024-11-21T23:14:56.179073Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179098Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2024-11-21T23:14:56.179126Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2024-11-21T23:14:56.179151Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179177Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2024-11-21T23:14:56.179206Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2024-11-21T23:14:56.179236Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179266Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2024-11-21T23:14:56.179314Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2024-11-21T23:14:56.179350Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179384Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2024-11-21T23:14:56.179415Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2024-11-21T23:14:56.179442Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179486Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2024-11-21T23:14:56.179517Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2024-11-21T23:14:56.179547Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the 
component DATA_INTEGRITY has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179575Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2024-11-21T23:14:56.179624Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2024-11-21T23:14:56.179655Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179679Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2024-11-21T23:14:56.179706Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT 2024-11-21T23:14:56.179735Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT 2024-11-21T23:14:56.179760Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2024-11-21T23:14:56.179907Z node 11 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2024-11-21T23:14:30.759425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:30.759520Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:30.865968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:31.945180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:31.945246Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:32.032325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:33.401579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:33.401652Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:33.454897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:36.971397Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:36.971501Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:37.100046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:39.443402Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:39.443507Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:39.557212Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:52.325528Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2024-11-21T23:14:52.325605Z node 15 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:52.383965Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:53.865982Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:53.866075Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:53.933504Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:00.256086Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:00.256172Z node 21 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:00.301048Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:01.449465Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:01.449551Z node 22 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:01.503025Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:02.657942Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:02.658034Z node 23 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:02.716201Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 18337, MsgBus: 7998 2024-11-21T23:14:48.921136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875016035562504:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:48.964431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00117b/r3tmp/tmp6VjxUP/pdisk_1.dat 2024-11-21T23:14:49.770904Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:49.774110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:49.774173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:49.785252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18337, node 1 2024-11-21T23:14:49.910127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:49.910156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:49.910168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:49.910291Z node 1 :NET_CLASSIFIER ERROR: got 
bad distributable configuration TClient is connected to server localhost:7998 TClient is connected to server localhost:7998 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:50.625044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:50.651025Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:50.659223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2024-11-21T23:14:50.686705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:14:50.694744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2943, MsgBus: 25767 2024-11-21T23:14:53.917406Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875036601746808:2188];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00117b/r3tmp/tmpmFGJA0/pdisk_1.dat 2024-11-21T23:14:54.052964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:14:54.161902Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:54.161992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:54.163502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2943, node 2 2024-11-21T23:14:54.173685Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:54.364219Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:54.364240Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:54.364247Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:54.364344Z 
node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25767 TClient is connected to server localhost:25767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:55.128812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:55.138423Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:55.161030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28401, MsgBus: 4535 2024-11-21T23:14:59.273867Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875060967647290:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:59.273934Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00117b/r3tmp/tmpHLgfXT/pdisk_1.dat 2024-11-21T23:14:59.420099Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:59.459956Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:59.460022Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:59.461430Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28401, node 3 2024-11-21T23:14:59.539719Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:59.539741Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:59.539749Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:59.539844Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4535 TClient is connected to server localhost:4535 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:00.036795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:00.042118Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:15:00.058812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:15:00.080907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TTicketParserTest::AuthorizationModify [GOOD] >> TConsoleConfigTests::TestValidation >> TConsoleConfigTests::TestValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:14:54.707742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:14:54.707851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:14:54.707907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:14:54.707954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:14:54.708000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:14:54.708029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:14:54.708097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:14:54.708460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:14:54.780427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:54.780479Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:54.791911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:14:54.795213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:14:54.795450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:14:54.801534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:14:54.802125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:14:54.802761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:54.803115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:14:54.808846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:54.810483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:54.810552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:54.810786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:14:54.810824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:54.810856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:14:54.810919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:14:54.818927Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:14:54.944056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:14:54.944228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:54.944407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:14:54.944586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:14:54.944629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:54.947662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 
SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:54.947812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:14:54.948038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:54.948098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:14:54.948142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:14:54.948175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:14:54.950722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:54.950782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:14:54.950812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:14:54.952818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:54.952862Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:54.952901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:14:54.952946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:14:54.956674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:14:54.960304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:14:54.960494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:14:54.961535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:14:54.961718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:14:54.961765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:14:54.962128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:14:54.962189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 
2024-11-21T23:14:54.962385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:14:54.962557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:14:54.968637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:14:54.968702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:14:54.968892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:14:54.968958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:14:54.969314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:14:54.969364Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:14:54.969466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:14:54.969497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:14:54.969545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:14:54.969612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:14:54.969671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:14:54.969706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:14:54.969803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:14:54.969844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:14:54.969879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:14:54.971951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:14:54.972053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:14:54.972091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:14:54.972146Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:14:54.972199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:14:54.972319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
114 2024-11-21T23:15:03.038553Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 114, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2024-11-21T23:15:03.038578Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2024-11-21T23:15:03.038675Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2024-11-21T23:15:03.038734Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:15:03.040499Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:978:2927], Recipient [7:123:2149]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1378 } } 2024-11-21T23:15:03.040551Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2024-11-21T23:15:03.040678Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1378 } } 2024-11-21T23:15:03.040731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2024-11-21T23:15:03.040861Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1378 } } 2024-11-21T23:15:03.040958Z node 7 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1378 } } 2024-11-21T23:15:03.041022Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:15:03.042827Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1038:2980], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:03.042880Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:03.042923Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2024-11-21T23:15:03.049968Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:978:2927], Recipient [7:123:2149]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T23:15:03.050049Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2024-11-21T23:15:03.050179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, 
tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T23:15:03.050239Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2024-11-21T23:15:03.050450Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T23:15:03.050526Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:15:03.050628Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 978 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2024-11-21T23:15:03.050711Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:03.050768Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T23:15:03.050807Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2024-11-21T23:15:03.050859Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2024-11-21T23:15:03.051049Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:15:03.052254Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:03.052419Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2024-11-21T23:15:03.052471Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:03.052586Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2024-11-21T23:15:03.052627Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:03.054643Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T23:15:03.054689Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:03.054826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T23:15:03.054863Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:03.054902Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2024-11-21T23:15:03.054999Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:978:2927] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2024-11-21T23:15:03.055281Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient 
[7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T23:15:03.055317Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T23:15:03.055370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2024-11-21T23:15:03.055405Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2024-11-21T23:15:03.055519Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:15:03.055541Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2024-11-21T23:15:03.055579Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2024-11-21T23:15:03.055642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2024-11-21T23:15:03.055706Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:392:2357] message: TxId: 114 2024-11-21T23:15:03.055772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2024-11-21T23:15:03.055837Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2024-11-21T23:15:03.055878Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2024-11-21T23:15:03.056006Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2024-11-21T23:15:03.057674Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:03.057770Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:392:2357] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2024-11-21T23:15:03.057952Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2024-11-21T23:15:03.058006Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1007:2949] 2024-11-21T23:15:03.058209Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1009:2951], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:15:03.058243Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:15:03.058266Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2024-11-21T23:15:03.059357Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1046:2988], Recipient [7:123:2149]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2024-11-21T23:15:03.059408Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:15:03.061040Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:03.061214Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at 
schemeshard: 72057594046678944 2024-11-21T23:15:03.061565Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2024-11-21T23:15:03.061733Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:15:03.063661Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:03.063795Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2024-11-21T23:15:03.063850Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 12073, MsgBus: 7088 2024-11-21T23:14:46.266339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875007601814263:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:46.266519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001193/r3tmp/tmpZ7TopK/pdisk_1.dat 2024-11-21T23:14:46.658778Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:46.677230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:46.677344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:46.702681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12073, node 1 2024-11-21T23:14:46.863075Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:46.863118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:46.863135Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:46.863719Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7088 TClient is connected to server localhost:7088 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:48.084346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:48.195921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:14:48.232297Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:14:50.722506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875024781684127:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:50.737552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:50.758235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:14:50.860675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:14:50.898183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:50.940347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:50.990377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875024781684431:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:50.990490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:50.990608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875024781684436:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:50.994276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2024-11-21T23:14:51.002599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875024781684438:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2024-11-21T23:14:51.293406Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875007601814263:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:51.293477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16431, MsgBus: 20201 2024-11-21T23:14:52.560151Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875034247630189:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:52.566684Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001193/r3tmp/tmph1gacB/pdisk_1.dat 2024-11-21T23:14:52.907564Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:52.934801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:52.934880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:52.947609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16431, node 2 2024-11-21T23:14:53.116492Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:53.116517Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:53.116523Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:53.116596Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20201 TClient is connected to server localhost:20201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:53.672122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:53.687011Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:53.700962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:14:53.796284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:54.060322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:54.161175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:57.342541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875055722468367:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:57.342661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:57.390386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:57.464278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:57.535195Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875034247630189:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:57.535276Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:57.537628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:14:57.573496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:14:57.611878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:14:57.685552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:14:57.755865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875055722468872:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:57.755952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:57.756045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875055722468877:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:57.759537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:14:57.780442Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875055722468879:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:14:58.943027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2024-11-21T23:14:59.662053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:15:00.177021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2024-11-21T23:15:00.747741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T23:15:01.299827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:15:01.969223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2024-11-21T23:15:02.452905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T23:15:02.489871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T23:15:04.577110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710714:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17168, MsgBus: 32254 2024-11-21T23:15:05.432764Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875089364747524:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:05.432996Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001193/r3tmp/tmpBGju45/pdisk_1.dat 2024-11-21T23:15:05.518573Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17168, node 3 2024-11-21T23:15:05.556705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:05.557353Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:05.569585Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:05.598215Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:05.598236Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:05.598243Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T23:15:05.598364Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32254 TClient is connected to server localhost:32254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:06.058140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:06.072933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:06.125453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:06.268320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:06.325406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:08.688982Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875102249651096:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:08.689097Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:08.727711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:08.763390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:08.801357Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:08.837371Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:08.870420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:08.904893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:08.998278Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875102249651596:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:08.998368Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:08.998608Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875102249651601:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:09.003088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:09.017608Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875102249651603:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> TConsoleConfigTests::TestCheckConfigUpdates >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2024-11-21T23:14:57.517139Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 (null) -> PendingActivation 2024-11-21T23:14:57.517213Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP01 ready to work 2024-11-21T23:14:57.517830Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @201 (null) -> PendingActivation 2024-11-21T23:14:57.517901Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP01 ready to work 2024-11-21T23:14:57.518026Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @96 PendingActivation -> PendingNodeInfo 2024-11-21T23:14:57.519546Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP02 configured for host 6:::1:3473 2024-11-21T23:14:57.519688Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @488 PendingNodeInfo -> PendingConnection 2024-11-21T23:14:57.520204Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake 2024-11-21T23:14:57.520348Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2024-11-21T23:14:57.521069Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH05 connected to peer 2024-11-21T23:14:57.521547Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:45044 2024-11-21T23:14:57.521958Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake 2024-11-21T23:14:57.523110Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 260281 ProgramStartTime: 2736403646540 Serial: 1262766209 ReceiverNodeId: 6 SenderActorId: "[5:1262766209:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 260281" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 260281" AcceptUUID: "Cluster for process with id: 260281" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: false HandshakeId: "}\267\261\374\324\037\013*{\2103\270*]\271\365A&\357\031o\276n\345\374\252\032\363\214\000\004D" RequestXxhash: true RequestXdcShuffle: true 2024-11-21T23:14:57.523703Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 260281 ProgramStartTime: 2736403646540 Serial: 1262766209 ReceiverNodeId: 6 SenderActorId: "[5:1262766209:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 260281" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 260281" AcceptUUID: "Cluster for process with id: 260281" } RequestModernFrame: true RequestAuthOnly: false 
RequestExtendedTraceFmt: true RequestExternalDataChannel: false HandshakeId: "}\267\261\374\324\037\013*{\2103\270*]\271\365A&\357\031o\276n\345\374\252\032\363\214\000\004D" RequestXxhash: true RequestXdcShuffle: true 2024-11-21T23:14:57.523776Z node 6 :INTERCONNECT WARN: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T23:14:57.524179Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @96 PendingActivation -> PendingNodeInfo 2024-11-21T23:14:57.525489Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP02 configured for host 5:::1:22642 2024-11-21T23:14:57.525540Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057]) 2024-11-21T23:14:57.525595Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @488 PendingNodeInfo -> PendingConnection 2024-11-21T23:14:57.525671Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2024-11-21T23:14:57.525741Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2024-11-21T23:14:57.525797Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @217 PendingConnection -> PendingConnection 2024-11-21T23:14:57.526254Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 260281 ProgramStartTime: 2736420996662 Serial: 3032952647 SenderActorId: "[6:3032952647:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 260281" AcceptUUID: "Cluster for process with id: 260281" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: false UseXxhash: true UseXdcShuffle: true } 2024-11-21T23:14:57.526441Z node 6 :INTERCONNECT INFO: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded 2024-11-21T23:14:57.527158Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 260281 ProgramStartTime: 2736420996662 Serial: 3032952647 SenderActorId: "[6:3032952647:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 260281" AcceptUUID: "Cluster for process with id: 260281" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: false UseXxhash: true UseXdcShuffle: true } 2024-11-21T23:14:57.527241Z node 5 :INTERCONNECT WARN: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T23:14:57.527309Z node 5 :INTERCONNECT INFO: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded 2024-11-21T23:14:57.527632Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2024-11-21T23:14:57.527718Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false 2024-11-21T23:14:57.527781Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @347 PendingConnection -> StateWork 2024-11-21T23:14:57.527913Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:22:2048] 2024-11-21T23:14:57.527983Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:3032952647:0] peer: [5:1262766209:0] socket: 25 2024-11-21T23:14:57.528032Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS10 traffic start 
2024-11-21T23:14:57.528115Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS11 registering socket in PollerActor 2024-11-21T23:14:57.528218Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.528277Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2024-11-21T23:14:57.528327Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.528389Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2024-11-21T23:14:57.528429Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:19:2057] poison: false 2024-11-21T23:14:57.528472Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @347 PendingConnection -> StateWork 2024-11-21T23:14:57.528577Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:24:2048] 2024-11-21T23:14:57.528635Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:1262766209:0] peer: [6:3032952647:0] socket: 24 2024-11-21T23:14:57.528669Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start 2024-11-21T23:14:57.528729Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2024-11-21T23:14:57.528801Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T23:14:57.528839Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2024-11-21T23:14:57.528870Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T23:14:57.528915Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS04 subscribe for session state for [5:17:2056] 2024-11-21T23:14:57.529082Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS01 InputSession created 2024-11-21T23:14:57.529131Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS01 InputSession created 2024-11-21T23:14:57.529183Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T23:14:57.529265Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.529317Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T23:14:57.529368Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.529447Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T23:14:57.529496Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.529556Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T23:14:57.529598Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.529649Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.529693Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.529735Z node 5 
:INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T23:14:57.529759Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T23:14:57.529818Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.529842Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.529867Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T23:14:57.529887Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T23:14:57.529995Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056] 2024-11-21T23:14:57.530182Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS22 outgoing packet Serial# 1 Confirm# 0 DataSize# 84 InflightDataAmount# 84 2024-11-21T23:14:57.530290Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2024-11-21T23:14:57.530445Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T23:14:57.530509Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2024-11-21T23:14:57.530610Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.530677Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.530728Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.530802Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2024-11-21T23:14:57.530996Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS22 outgoing packet Serial# 1 Confirm# 1 DataSize# 84 InflightDataAmount# 84 2024-11-21T23:14:57.531086Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 0 2024-11-21T23:14:57.531145Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T23:14:57.531184Z node 5 ... 
ession [6:23:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T23:14:57.531790Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS12 Read recvres# 0 num# 1 err# 2024-11-21T23:14:57.532065Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:23:2048] [node 5] ICIS09 ReestablishConnection, reason# EndOfStream 2024-11-21T23:14:57.532159Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056] 2024-11-21T23:14:57.532211Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T23:14:57.532243Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# 0 num# 1 err# 2024-11-21T23:14:57.532402Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS09 ReestablishConnection, reason# EndOfStream 2024-11-21T23:14:57.532482Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS07 socket disconnect 25 reason# EndOfStream 2024-11-21T23:14:57.532544Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS25 shutdown socket, reason# EndOfStream 2024-11-21T23:14:57.532644Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS15 start handshake 2024-11-21T23:14:57.532802Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS22 outgoing packet Serial# 2 Confirm# 1 DataSize# 84 InflightDataAmount# 84 2024-11-21T23:14:57.532872Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS13 reestablish connection 2024-11-21T23:14:57.532924Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# EPIPE 2024-11-21T23:14:57.532992Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T23:14:57.533475Z node 6 :INTERCONNECT DEBUG: Handshake [6:26:2058] [node 5] ICH01 starting outgoing handshake 2024-11-21T23:14:57.533606Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS07 socket disconnect -1 reason# EndOfStream 2024-11-21T23:14:57.533650Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS15 start handshake 2024-11-21T23:14:57.534066Z node 5 :INTERCONNECT DEBUG: Handshake [5:28:2058] [node 6] ICH01 starting outgoing handshake 2024-11-21T23:14:57.534192Z node 6 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2024-11-21T23:14:57.534249Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2024-11-21T23:14:57.535391Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:45056 2024-11-21T23:14:57.536192Z node 6 :INTERCONNECT DEBUG: Handshake [6:30:2059] [node 0] ICH02 starting incoming handshake 2024-11-21T23:14:57.536859Z node 6 :INTERCONNECT DEBUG: Handshake [6:26:2058] [node 5] ICH05 connected to peer 2024-11-21T23:14:57.537178Z node 5 :INTERCONNECT DEBUG: Handshake [5:28:2058] [node 6] ICH05 connected to peer 2024-11-21T23:14:57.537263Z node 5 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:35580 2024-11-21T23:14:57.537693Z node 5 :INTERCONNECT DEBUG: Handshake [5:31:2059] [node 0] ICH02 starting incoming handshake 2024-11-21T23:14:57.538382Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP09 (actor [6:30:2059]) from: [5:1262766209:0] for: [6:3032952647:0] 2024-11-21T23:14:57.538505Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS08 incoming handshake Self# [5:1262766209:0] Peer# [6:3032952647:0] Counter# 1 LastInputSerial# 1 2024-11-21T23:14:57.538570Z node 6 
:INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2024-11-21T23:14:57.538643Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP09 (actor [5:31:2059]) from: [6:3032952647:0] for: [5:1262766209:0] 2024-11-21T23:14:57.538684Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS08 incoming handshake Self# [6:3032952647:0] Peer# [5:1262766209:0] Counter# 1 LastInputSerial# 1 2024-11-21T23:14:57.538727Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:31:2059]) is held 2024-11-21T23:14:57.539057Z node 6 :INTERCONNECT INFO: Handshake [6:30:2059] [node 5] ICH04 handshake succeeded 2024-11-21T23:14:57.540347Z node 5 :INTERCONNECT INFO: Handshake [5:28:2058] [node 6] ICH04 handshake succeeded 2024-11-21T23:14:57.540551Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2024-11-21T23:14:57.540618Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:31:2059] poison: true 2024-11-21T23:14:57.540682Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:28:2058] poison: false 2024-11-21T23:14:57.540752Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @347 StateWork -> StateWork 2024-11-21T23:14:57.540819Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:28:2058] self: [5:1262766209:0] peer: [6:3032952647:0] socket: 27 2024-11-21T23:14:57.540875Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start 2024-11-21T23:14:57.540992Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2024-11-21T23:14:57.541067Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T23:14:57.541121Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2024-11-21T23:14:57.541210Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T23:14:57.542752Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2024-11-21T23:14:57.542819Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:30:2059] poison: false 2024-11-21T23:14:57.542878Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:26:2058] poison: true 2024-11-21T23:14:57.542922Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @347 StateWork -> StateWork 2024-11-21T23:14:57.542969Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS09 handshake done sender: [6:30:2059] self: [6:3032952647:0] peer: [5:1262766209:0] socket: 28 2024-11-21T23:14:57.543021Z node 6 :INTERCONNECT_SESSION INFO: Session [6:22:2048] [node 5] ICS10 traffic start 2024-11-21T23:14:57.543106Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS11 registering socket in PollerActor 2024-11-21T23:14:57.543151Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T23:14:57.543192Z node 6 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2024-11-21T23:14:57.543309Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2024-11-21T23:14:57.543367Z node 6 
:INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2024-11-21T23:14:57.543401Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T23:14:57.543493Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:32:2048] [node 6] ICIS01 InputSession created 2024-11-21T23:14:57.544260Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:33:2048] [node 5] ICIS01 InputSession created 2024-11-21T23:14:57.544321Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:33:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T23:14:57.544412Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:33:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2024-11-21T23:14:57.544502Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:33:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.544546Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:33:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.544591Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:32:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T23:14:57.544654Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:32:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.546728Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:32:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T23:14:57.546831Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:32:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.546911Z node 6 :INTERCONNECT NOTICE: Proxy [6:9:2048] [node 5] ICP27 obsolete handshake fail ignored 2024-11-21T23:14:57.546969Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T23:14:57.547011Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T23:14:57.547076Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:33:2048] [node 5] ICIS02 ReceiveData called 2024-11-21T23:14:57.547120Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:33:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.547160Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T23:14:57.547211Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T23:14:57.547281Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2024-11-21T23:14:57.547340Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T23:14:57.547377Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2024-11-21T23:14:57.547450Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84 2024-11-21T23:14:57.547548Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T23:14:57.547588Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T23:14:57.547622Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:22:2048] [node 5] ICS23 confirm count: 1 2024-11-21T23:14:57.547734Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:32:2048] [node 6] ICIS02 ReceiveData called 2024-11-21T23:14:57.547779Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:32:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 
2024-11-21T23:14:57.547860Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:32:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2024-11-21T23:14:57.547916Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2 2024-11-21T23:14:57.547966Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2024-11-21T23:14:57.548083Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2024-11-21T23:14:57.548166Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2 2024-11-21T23:14:57.548253Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS01 socket: 27 reason# 2024-11-21T23:14:57.548321Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:24:2048] VirtualId# [5:1262766209:0] 2024-11-21T23:14:57.548380Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 StateWork -> PendingActivation 2024-11-21T23:14:57.548432Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# 2024-11-21T23:14:57.548531Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] 2024-11-21 23:14:43,465 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2024-11-21 23:14:43,752 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 45728 75.5M 75.7M 21.6M test_tool run_ut @/home/runner/.ya/build/build_root/dl5p/001a6c/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args 46026 1.9G 1.8G 1.8G └─ ydb-services-persqueue_v1-ut --trace-path-append /home/runner/.ya/build/build_root/dl5p/001a6c/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/chunk 166881 599M 1.0G 364M └─ llvm-symbolizer --demangle --inlines --default-arch=x86_64 Test command err: === Server->StartServer(false); 2024-11-21T23:04:47.718861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439872435076488127:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:47.731959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001a6c/r3tmp/tmpe13ezq/pdisk_1.dat 2024-11-21T23:04:48.860962Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:04:48.959681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:49.220366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:04:49.220536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:04:49.222621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9674, node 1 2024-11-21T23:04:49.982113Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:04:49.986812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:04:50.005874Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:04:50.005897Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:04:50.269284Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001a6c/r3tmp/yandexc1EtFQ.tmp 2024-11-21T23:04:50.269559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001a6c/r3tmp/yandexc1EtFQ.tmp 2024-11-21T23:04:50.298785Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001a6c/r3tmp/yandexc1EtFQ.tmp 2024-11-21T23:04:50.300355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:04:51.561665Z INFO: TTestServer started on Port 29034 GrpcPort 9674 2024-11-21T23:04:52.686534Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439872435076488127:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:04:52.686814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:29034 PQClient connected to localhost:9674 === TenantModeEnabled() = 0 === Init PQ - start server on port 
9674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:04:53.910915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:04:53.911174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.911361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2024-11-21T23:04:53.911631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:04:53.911905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.912462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:53.912559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:04:53.912739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.912769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:04:53.912783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2024-11-21T23:04:53.912796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:04:53.913375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.913407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:04:53.913420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:04:53.913717Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.913753Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.913771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:53.913810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 waiting... 2024-11-21T23:04:53.922026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:04:53.926523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:53.926552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2024-11-21T23:04:53.926572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:04:53.926715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2024-11-21T23:04:53.926845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2024-11-21T23:04:53.928349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732230293976, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:04:53.928466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 7439872443666423007 RawX2: 4294969527 } } Step: 1732230293976 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2024-11-21T23:04:53.928508Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:53.928751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:04:53.928779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:04:53.928947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:04:53.928985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2024-11-21T23:04:53.929465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:04:53.929480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:04:53.929626Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:04:53.929651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7439872443666423043:2256], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2024-11-21T23:04:53.929691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:04:53.929709Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:04:53.929780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:04:53.929790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T23:04:53.929810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2024-11-21T23:04:53.929825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2024-11-21T23:04:53.929839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:04:53.929850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2024-11-21T23:04:53.929879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2024-11-21T23:04:53.929892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:04:53.929919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publ ... ffset 0x12b0 warning: address range table at offset 0x12d0 has a premature terminator entry at offset 0x12e0 warning: address range table at offset 0x1300 has a premature terminator entry at offset 0x1310 warning: address range table at offset 0x1330 has a premature terminator entry at offset 0x1340 warning: address range table at offset 0x1390 has a premature terminator entry at offset 0x13a0 warning: address range table at offset 0x13c0 has a premature terminator entry at offset 0x13d0 warning: address range table at offset 0x13f0 has a premature terminator entry at offset 0x1400 warning: address range table at offset 0x1420 has a premature terminator entry at offset 0x1430 warning: address range table at offset 0x1540 has a premature terminator entry at offset 0x1550 warning: address range table at offset 0x1570 has a premature terminator entry at offset 0x1580 warning: address range table at offset 0x15a0 has a premature terminator entry at offset 0x15b0 warning: address range table at offset 0x15d0 has a premature terminator entry at offset 0x15e0 warning: address range table at offset 0x1600 has a premature terminator entry at offset 0x1610 warning: address range table at offset 0x0 has a premature terminator entry at offset 0x10 warning: address range table at offset 0x30 has a premature terminator entry at offset 0x40 warning: address range table at offset 0x60 has a premature terminator entry at offset 0x70 warning: address range table at offset 0x90 has a premature terminator entry at offset 0xa0 warning: address range table at offset 0xc0 has a premature terminator entry at offset 0xd0 warning: address range table at offset 0xa20 has a premature terminator entry at offset 0xa30 warning: address range table at offset 0x1000 has a premature terminator entry at offset 0x1010 warning: address 
range table at offset 0x1030 has a premature terminator entry at offset 0x1040 warning: address range table at offset 0x1060 has a premature terminator entry at offset 0x1070 warning: address range table at offset 0x1090 has a premature terminator entry at offset 0x10a0 warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 warning: address range table at offset 0x10f0 has a premature terminator entry at offset 0x1100 warning: address range table at offset 0x1120 has a premature terminator entry at offset 0x1130 warning: address range table at offset 0x1150 has a premature terminator entry at offset 0x1160 warning: address range table at offset 0x1180 has a premature terminator entry at offset 0x1190 warning: address range table at offset 0x11b0 has a premature
terminator entry at offset 0x11c0 warning: address range table at offset 0x11e0 has a premature terminator entry at offset 0x11f0 warning: address range table at offset 0x1210 has a premature terminator entry at offset 0x1220 warning: address range table at offset 0x1240 has a premature terminator entry at offset 0x1250 warning: address range table at offset 0x1270 has a premature terminator entry at offset 0x1280 warning: address range table at offset 0x12a0 has a premature terminator entry at offset 0x12b0 warning: address range table at offset 0x12d0 has a premature terminator entry at offset 0x12e0 warning: address range table at offset 0x1300 has a premature terminator entry at offset 0x1310 warning: address range table at offset 0x1330 has a premature terminator entry at offset 0x1340 warning: address range table at offset 0x1390 has a premature terminator entry at offset 0x13a0 warning: address range table at offset 0x13c0 has a premature terminator entry at offset 0x13d0 warning: address range table at offset 0x13f0 has a premature terminator entry at offset 0x1400 warning: address range table at offset 0x1420 has a premature terminator entry at offset 0x1430 warning: address range table at offset 0x1540 has a premature terminator entry at offset 0x1550 warning: address range table at offset 0x1570 has a premature terminator entry at offset 0x1580 warning: address range table at offset 0x15a0 has a premature terminator entry at offset 0x15b0 warning: address range table at offset 0x15d0 has a premature terminator entry at offset 0x15e0 warning: address range table at offset 0x1600 has a premature terminator entry at offset 0x1610 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7480276291/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/dl5p/001a6c/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1747, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/7480276291/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/dl5p/001a6c/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/chunk3/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> TConsoleTests::TestAlterUnknownTenant >> TInterconnectTest::TestConnectAndDisconnect >> TInterconnectTest::TestNotifyUndelivered >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] |84.9%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] >> TInterconnectTest::TestConnectAndDisconnect [GOOD] |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TInterconnectTest::TestManyEvents >> TInterconnectTest::TestBlobEvent >> TestProtocols::TestConnectProtocol >> TWebLoginService::AuditLogLdapLoginBadBind >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> TSchemeShardLoginTest::BasicLogin >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> TWebLoginService::AuditLogLoginBadPassword >> TWebLoginService::AuditLogLdapLoginSuccess >> TWebLoginService::AuditLogLogout >> TWebLoginService::AuditLogLoginSuccess >> TWebLoginService::AuditLogLdapLoginBadUser >> TInterconnectTest::TestBlobEventPreSerialized >> KqpQueryService::DdlTx [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestRemoveTenant [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TInterconnectTest::TestManyEvents [GOOD] >> TConsoleTests::TestCreateTenantWrongPool >> TWebLoginService::AuditLogLogout [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> TTicketParserTest::AuthorizationWithUserAccount >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestCrossConnect >> TConsoleTests::TestAlterUnknownTenantExtSubdomain >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TTicketParserTest::BulkAuthorizationModify [GOOD] >> TestProtocols::TestConnectProtocol [GOOD] >> TInterconnectTest::TestBlobEvent [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TSchemeShardLoginTest::BasicLogin [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> 
TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TestProtocols::TestHTTPCollected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] Test command err: 2024-11-21T23:14:43.276750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:43.276818Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:43.367052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:44.486769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:44.486833Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:44.550065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:46.105049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:46.105121Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:46.172816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:47.936947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:47.937020Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:47.995051Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:49.952548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:49.952639Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:50.016044Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:51.412346Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:51.412421Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:51.482415Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:52.475966Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:52.476051Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:52.549589Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:53.938047Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:53.938130Z node 8 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T23:14:53.999392Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:55.583811Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:55.583900Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:55.660292Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:57.075995Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:57.076080Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:57.120098Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:58.276723Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:58.276814Z node 11 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:58.328855Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:59.528091Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:59.528174Z node 12 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:59.574538Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:00.871755Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:00.871843Z node 13 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:00.925598Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:02.078542Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:02.078635Z node 14 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:02.156420Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:03.429217Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:03.429319Z node 15 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:03.482516Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:04.636290Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:04.636391Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:04.708109Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:05.958824Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:05.958909Z node 18 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T23:15:06.019002Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:07.542711Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:07.542803Z node 20 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:07.588906Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:08.884747Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:08.884823Z node 22 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:08.934278Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |84.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |84.9%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |84.9%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |84.9%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:15.918941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:15.919031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:15.919069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:15.919124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:15.919178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:15.919210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:15.919267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:15.919622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:15.992952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:15.993007Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:16.002248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:16.006365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:16.006585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:16.012928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:16.013367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:16.014288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.014566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:16.018662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.019809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.019867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.020069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:16.020115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.020154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:16.020253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.029127Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:16.211084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.211318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.211531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.211709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.211757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.214905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.215051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:16.215255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.215319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:16.215380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:16.215422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:16.219837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.219905Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:16.219947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:16.221802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.221850Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.221892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.221953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.232118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:16.234347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:16.234564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:16.235577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.235715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.235773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.235987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:16.236029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.236193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.236258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:16.238117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.238155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.238289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.238324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:16.238605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.238646Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:16.238729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:16.238755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.238793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:16.238825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2024-11-21T23:15:16.238864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:16.238891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:16.238945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:16.238974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:16.239000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:16.240405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.240499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.240539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:16.240618Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:16.240651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.240720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:16.243270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:16.243832Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:15:16.244319Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.258500Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:15:16.260907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.261140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Login authentication is disabled, at schemeshard: 72057594046678944 2024-11-21T23:15:16.262104Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:15:16.264648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Login authentication is disabled" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.264797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Login authentication is disabled, operation: CREATE USER, path: /MyRoot 2024-11-21T23:15:16.265363Z node 1 :TX_PROXY DEBUG: 
Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T23:15:16.265734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T23:15:16.265782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T23:15:16.320384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Error: "Login authentication is disabled", at schemeshard: 72057594046678944 2024-11-21T23:15:16.320494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.320532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.320684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.320734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T23:15:16.321227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2024-11-21T23:15:16.321628Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:16.321808Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 187us result status StatusSuccess 2024-11-21T23:15:16.322341Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr1PZyFgAHwzyzXWUNTMG\nMEBbWcnMc6AL5uzFSLpl4VEi/5Xy7N+fxAmBX1NaYDS/SSdFcsPu+c3HmJymJVr2\nAPb7MLwddroGlSzndfxlYc/WeHk7LpnOtpPQxTK2yvwRCMFypIyTmVpHZ9hGcWiu\nbae0rsWYawyAAqtESzfZ4sAtGlVs6VgBHk/kTTvipwhWvfB56X4H2Vmp1OsD7/n2\nEV7mpT9XT9sunF/z6Ojtkh55WOo4vUtzBM+mygT3DnKczmRyOijmTpK9PbDPMSKE\n4bBC1U+TY3mCIkFINA7m53J2IFuA4sKRK3Me19sBOYtcjm614OGPUowtSVMQw+81\n2QIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732317316317 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> TInterconnectTest::TestBlobEvent220Bytes >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists >> TConsoleTxProcessorTests::TestTxProcessorSingle >> TTicketParserTest::AuthorizationUnavailable >> TTicketParserTest::AuthorizationUnavailable [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2024-11-21T23:14:49.976891Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875021505140109:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:49.976971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c01/r3tmp/tmp4GfGln/pdisk_1.dat 2024-11-21T23:14:50.781866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:50.781958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:50.794491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:50.857672Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65137, node 1 2024-11-21T23:14:51.142938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:51.142958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:51.142964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:51.143053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18637 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:51.639696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:51.655136Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:51.657997Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T23:14:51.658079Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Connect to grpc://localhost:20524 2024-11-21T23:14:51.662824Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T23:14:51.677733Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Status 14 Service Unavailable 2024-11-21T23:14:51.678608Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T23:14:51.678650Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T23:14:51.678744Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T23:14:51.679000Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T23:14:51.685629Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Status 14 Service Unavailable 2024-11-21T23:14:51.685788Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T23:14:51.685825Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T23:14:53.117265Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T23:14:53.117442Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( 
something.read) 2024-11-21T23:14:53.118090Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T23:14:53.124906Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Status 14 Service Unavailable 2024-11-21T23:14:53.130046Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T23:14:53.130092Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T23:14:54.975531Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875021505140109:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:54.975738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:55.117672Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T23:14:55.117774Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T23:14:55.118467Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T23:14:55.128812Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:14:55.129322Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2024-11-21T23:15:04.388520Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875082344423459:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:04.388611Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c01/r3tmp/tmpprpkM2/pdisk_1.dat 2024-11-21T23:15:04.533752Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9996, node 2 2024-11-21T23:15:04.565542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:04.565629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:04.567594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:04.614606Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:04.614635Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:04.614648Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:04.614792Z node 2 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:7656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:04.839751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:04.846445Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T23:15:04.846497Z node 2 :GRPC_CLIENT DEBUG: [5160000e22d0] Connect to grpc://localhost:61580 2024-11-21T23:15:04.847537Z node 2 :GRPC_CLIENT DEBUG: [5160000e22d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T23:15:04.857595Z node 2 :GRPC_CLIENT DEBUG: [5160000e22d0] Status 14 Service Unavailable 2024-11-21T23:15:04.857751Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2024-11-21T23:15:04.857781Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T23:15:04.857844Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T23:15:04.858055Z node 2 :GRPC_CLIENT DEBUG: [5160000e22d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T23:15:04.863053Z node 2 :GRPC_CLIENT DEBUG: [5160000e22d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } ... 
zeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "gizmo" type: "iam.gizmo" } permission: "monitoring.view" } } result_filter: ALL_FAILED } 2024-11-21T23:15:08.136697Z node 3 :GRPC_CLIENT DEBUG: [5160000a8cd0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:08.136876Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T23:15:11.300453Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439875112770495341:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:11.300526Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c01/r3tmp/tmpyGzDPz/pdisk_1.dat 2024-11-21T23:15:11.384488Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63284, node 4 2024-11-21T23:15:11.427898Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:11.427977Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:11.429511Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:11.439356Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:11.439377Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:11.439384Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:11.439507Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24407 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
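Aside: throughout the trace the access key AKIAIOSFODNN7EXAMPLE is logged as AKIA****MPLE followed by a short hexadecimal fingerprint (B3EDC139). A minimal sketch of that kind of log masking, assuming only the first and last four characters are kept; the helper name and the use of std::hash for the fingerprint are assumptions, the real ticket parser may compute its digest differently.

#include <cstddef>
#include <functional>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>

// Masks the middle of a credential so it can be logged safely, keeping a
// short prefix/suffix and appending a fingerprint so distinct credentials
// stay distinguishable in the log.
std::string MaskForLog(const std::string& secret, std::size_t keep = 4) {
    if (secret.size() <= 2 * keep) {
        return std::string(secret.size(), '*');
    }
    std::ostringstream out;
    out << secret.substr(0, keep) << "****"
        << secret.substr(secret.size() - keep);
    // Fingerprint: std::hash is an assumption; the log shows an 8-hex-digit
    // value whose exact algorithm is not visible in the output.
    out << " (" << std::uppercase << std::hex << std::setw(8)
        << std::setfill('0')
        << (std::hash<std::string>{}(secret) & 0xFFFFFFFFu) << ')';
    return out.str();
}

int main() {
    std::cout << MaskForLog("AKIAIOSFODNN7EXAMPLE") << '\n';
}
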
2024-11-21T23:15:11.663400Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:11.671081Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:11.671160Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2024-11-21T23:15:11.671184Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2024-11-21T23:15:11.671205Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T23:15:11.671226Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2024-11-21T23:15:11.671290Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Connect to grpc://localhost:9736 2024-11-21T23:15:11.674658Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:11.675026Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:11.675187Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:11.675351Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:11.675559Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:11.682338Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Status 16 Access Denied 2024-11-21T23:15:11.682529Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:11.682871Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Status 16 Access Denied 2024-11-21T23:15:11.682930Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:11.683178Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Status 16 Access Denied 2024-11-21T23:15:11.683270Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:11.683419Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:11.683560Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2024-11-21T23:15:11.684912Z node 4 :GRPC_CLIENT DEBUG: [5160000e91d0] Status 16 Access Denied 2024-11-21T23:15:11.685005Z 
node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:11.685044Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T23:15:11.686546Z node 4 :GRPC_CLIENT DEBUG: [5160000e8bd0] Connect to grpc://localhost:32178 2024-11-21T23:15:11.687472Z node 4 :GRPC_CLIENT DEBUG: [5160000e8bd0] Request GetUserAccountRequest { user_account_id: "user1" } 2024-11-21T23:15:11.694013Z node 4 :GRPC_CLIENT DEBUG: [5160000e8bd0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2024-11-21T23:15:11.694283Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T23:15:14.692137Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875127849178107:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:14.692263Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c01/r3tmp/tmpAs3Nbn/pdisk_1.dat 2024-11-21T23:15:14.805929Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:14.832615Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:14.832712Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:14.834416Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8800, node 5 2024-11-21T23:15:14.892604Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:14.892638Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:14.892648Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:14.892796Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:15.195419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
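Aside: the node 4 trace above shows the ticket parser treating gRPC status 14 (Service Unavailable) as a retryable error and status 16 (Access Denied) as a permanent one, refreshing the ticket on a timer until it gets a definitive answer. A minimal sketch of that classification and refresh loop using only the standard library; the status values come from the log, everything else (names, the roughly two-second refresh period, the attempt cap) is an assumption.

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// gRPC status codes observed in the trace above.
constexpr int kUnavailable = 14;      // "Service Unavailable" -> retryable
constexpr int kPermissionDenied = 16; // "Access Denied"       -> permanent

bool IsRetryable(int grpcStatus) {
    return grpcStatus == kUnavailable;
}

// Calls `authorize` until it returns a non-retryable status or the attempt
// budget is exhausted, waiting between calls ("Refreshing ticket ...").
int AuthorizeWithRefresh(const std::function<int()>& authorize,
                         int maxAttempts = 5,
                         std::chrono::milliseconds refreshPeriod =
                             std::chrono::seconds(2)) {
    int status = authorize();
    for (int attempt = 1; attempt < maxAttempts && IsRetryable(status); ++attempt) {
        std::this_thread::sleep_for(refreshPeriod);
        status = authorize();
    }
    return status;
}

int main() {
    int calls = 0;
    // Fake backend: unavailable twice, then success (status 0), mirroring the log.
    auto fake = [&calls]() { return ++calls < 3 ? kUnavailable : 0; };
    // Short refresh period so the demo runs quickly; the log suggests ~2s.
    std::cout << "final status: "
              << AuthorizeWithRefresh(fake, 5, std::chrono::milliseconds(10))
              << '\n';
}
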
2024-11-21T23:15:15.201674Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2024-11-21T23:15:15.201734Z node 5 :GRPC_CLIENT DEBUG: [5160000c24d0] Connect to grpc://localhost:5479 2024-11-21T23:15:15.202788Z node 5 :GRPC_CLIENT DEBUG: [5160000c24d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2024-11-21T23:15:15.207749Z node 5 :GRPC_CLIENT DEBUG: [5160000c24d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:15.207940Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T23:15:15.208405Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2024-11-21T23:15:15.208663Z node 5 :GRPC_CLIENT DEBUG: [5160000c24d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2024-11-21T23:15:15.210032Z node 5 :GRPC_CLIENT DEBUG: [5160000c24d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:15.210230Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:08:58.179541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:08:58.179663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:58.179707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:08:58.179747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: using default configuration 2024-11-21T23:08:58.179816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:08:58.179851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:08:58.179949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:08:58.180299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:08:58.258529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:08:58.258603Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:08:58.270802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:08:58.271239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:08:58.271442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:08:58.288838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:08:58.289225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:08:58.289959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:58.294653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:58.299855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:58.301343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:58.301403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:58.301473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:08:58.301518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:58.301552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:08:58.301812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:08:58.308768Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:08:58.441170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:08:58.441413Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.441604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:08:58.441840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:08:58.441897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.445856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:58.445980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:08:58.446222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.446328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:08:58.446377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:08:58.446433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:08:58.450696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.450797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:08:58.450838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:08:58.452964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.453017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.453069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:58.453111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:08:58.456726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:08:58.460984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:08:58.461234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
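Aside: the AlterSubDomain trace above walks the single operation through the numeric states 2 -> 3 -> 128 -> 240 before it is reported done. A small sketch of validating such a progression against an allowed-transition table; the numeric values are taken from the log, while the labels attached to them in the comments are informal guesses, not the schemeshard's real enum names.

#include <cstddef>
#include <iostream>
#include <map>
#include <vector>

// States observed in the trace: 2 -> 3 -> 128 -> 240.
const std::map<int, int> kNextState = {
    {2, 3},     // shards created     -> configure parts (assumed label)
    {3, 128},   // parts configured   -> propose to coordinator (assumed label)
    {128, 240}, // plan step received -> done (assumed label)
};

bool IsValidProgression(const std::vector<int>& states) {
    for (std::size_t i = 0; i + 1 < states.size(); ++i) {
        auto it = kNextState.find(states[i]);
        if (it == kNextState.end() || it->second != states[i + 1]) {
            return false;
        }
    }
    return true;
}

int main() {
    std::cout << std::boolalpha
              << IsValidProgression({2, 3, 128, 240}) << '\n'; // true
    std::cout << IsValidProgression({2, 128}) << '\n';         // false
}
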
2024-11-21T23:08:58.462158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:08:58.462285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:08:58.462342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:58.462680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:08:58.462762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:08:58.462932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:08:58.463019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:08:58.465782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:08:58.465840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:08:58.466047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:08:58.466097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:08:58.466491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:08:58.466534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:08:58.466641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:08:58.466693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:58.466733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:08:58.466801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:08:58.466841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:08:58.466870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:08:58.466935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:08:58.466969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:08:58.467002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
tionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: 
false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:14.042717Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:15:14.042927Z node 117 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 238us result status StatusSuccess 2024-11-21T23:15:14.043580Z node 117 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } 
Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:14.064864Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1074:2817] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:15:14.064979Z node 117 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][117:992:2817] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2024-11-21T23:15:14.065117Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1074:2817] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732230914011405 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1732230914011405 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1732230914011405 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:15:14.068727Z node 117 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][117:1074:2817] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2024-11-21T23:15:14.068849Z node 117 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][117:992:2817] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlTx [GOOD] Test command err: Trying to start YDB, gRPC: 30360, MsgBus: 30031 2024-11-21T23:14:18.129979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874887961923918:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:18.130020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001f9d/r3tmp/tmpbHGvYA/pdisk_1.dat 2024-11-21T23:14:18.729927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:18.730013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:18.731950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:18.779381Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30360, node 1 2024-11-21T23:14:18.902574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:18.902593Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:18.902620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:18.902720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30031 TClient is connected to server localhost:30031 WaitRootIsUp 'Root'... 
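Aside: earlier in the trace above the async-index change sender forwards records with orders 1, 3 and 5 and receives per-record STATUS_OK replies plus LastRecordOrder: 5, after which everything at or below that order can be dropped from its resend queue. A minimal sketch of that bookkeeping; the struct and function names are invented for illustration and the real change sender keeps considerably more state per shard.

#include <cstdint>
#include <deque>
#include <iostream>

struct ChangeRecord {
    std::uint64_t order;
    // body, path id, schema version, ... omitted
};

// Drops every record whose order is already covered by the shard's
// LastRecordOrder acknowledgment, leaving only records still to (re)send.
void AckUpTo(std::deque<ChangeRecord>& pending, std::uint64_t lastRecordOrder) {
    while (!pending.empty() && pending.front().order <= lastRecordOrder) {
        pending.pop_front();
    }
}

int main() {
    std::deque<ChangeRecord> pending{{1}, {3}, {5}, {7}};
    AckUpTo(pending, 5); // shard replied with LastRecordOrder: 5
    std::cout << "records left to send: " << pending.size() << '\n'; // 1
}
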
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:19.790854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:19.810817Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:19.823701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:14:19.983325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:14:20.210530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:20.314329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:23.131361Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874887961923918:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:23.131655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:23.132741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874909436761974:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:23.132850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:23.389300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:23.425108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:23.498661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:14:23.551984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:14:23.594488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:14:23.647448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:14:23.728570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874909436762480:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:23.728640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:23.729044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874909436762485:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:23.731936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:14:23.741879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874909436762487:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:14:25.005489Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWM4YzViODgtMTMzMDQ1MTgtOTkwM2MzMy03OGM2NmZjNQ==, ActorId: [1:7439874913731730120:2468], ActorState: ExecuteState, TraceId: 01jd8g2r1v8h4t6wv2pt8ecren, Create QueryResponse for error on request, msg: 2024-11-21T23:14:25.102877Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmJlZDk1M2EtZGNhMjJkMmUtMzAzZTkyOTktYjQ5NDQyYw==, ActorId: [1:7439874918026697479:2477], ActorState: ExecuteState, TraceId: 01jd8g2r5s8vwhj5e77an8hea4, Create QueryResponse for error on request, msg: 2024-11-21T23:14:25.144001Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWMwZGIyNDktNDdjODAxNDgtYTM2ZTcwZDEtM2Y4OThjZDI=, ActorId: [1:7439874918026697495:2480], ActorState: ExecuteState, TraceId: 01jd8g2r6q0atetjmgkam4az7n, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8455, MsgBus: 25492 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001f9d/r3tmp/tmpsex1QQ/pdisk_1.dat 2024-11-21T23:14:26.339762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:14:26.343469Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:26.371393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:26.371485Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:26.372605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8455, node 2 2024-11-21T23:14:26.613607Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:26.613638Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:26.613647Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:26.613763Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25492 TClient is connected to server localhost:25492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:27.812427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:27.833914Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:27.860218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:28.166682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_1" } items { text_value: "qwerty" } } } } } ; Execute SQL: DROP OBJECT IF EXISTS my_secret_1 (TYPE SECRET); Execute SQL: DROP OBJECT IF EXISTS my_secret_1 (TYPE SECRET); Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Execute SQL: CREATE OBJECT my_secret_2 (TYPE SECRET) WITH (value="qwerty"); 2024-11-21T23:15:08.592154Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439875102417057872:4248], TxId: 281474976710900, task: 1. Ctx: { TraceId : 01jd8g42790chb2v0a0670k7rt. SessionId : ydb://session/3?node_id=2&id=ZGEyNTE1MjctOWY5YTY4OTMtYTYxYWUxNWItZDEwZjMyZjE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T23:15:08.593070Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439875102417057873:4249], TxId: 281474976710900, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd8g42790chb2v0a0670k7rt. SessionId : ydb://session/3?node_id=2&id=ZGEyNTE1MjctOWY5YTY4OTMtYTYxYWUxNWItZDEwZjMyZjE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439875102417057869:4151], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T23:15:08.594412Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGEyNTE1MjctOWY5YTY4OTMtYTYxYWUxNWItZDEwZjMyZjE=, ActorId: [2:7439875098122090291:4151], ActorState: ExecuteState, TraceId: 01jd8g42790chb2v0a0670k7rt, Create QueryResponse for error on request, msg: 2024-11-21T23:15:08.599140Z node 2 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jd8g41w6crjr1t30qbkrqz9b" } } } } ;request=session_id: "ydb://session/3?node_id=2&id=ZGEyNTE1MjctOWY5YTY4OTMtYTYxYWUxNWItZDEwZjMyZjE=" tx_control { tx_id: "01jd8g41w6crjr1t30qbkrqz9b" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/values`\nSELECT ownerUserId,secretId,value FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_2" } items { text_value: "qwerty" } } } } } ; 2024-11-21T23:15:08.599445Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWU4ZDI2MzctMjI2YWE4YjUtZTcyNjYyNmYtMzMzOGFjOWE=, ActorId: [2:7439875098122090283:4146], ActorState: ExecuteState, TraceId: 01jd8g41e3ejvy0q6zt9mc3sww, Create QueryResponse for error on request, msg: Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Trying to start YDB, gRPC: 3275, MsgBus: 10924 2024-11-21T23:15:11.538487Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875114468807900:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:11.538552Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001f9d/r3tmp/tmpzqbziF/pdisk_1.dat 2024-11-21T23:15:11.647750Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:11.672882Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:11.672996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:11.674227Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3275, node 3 2024-11-21T23:15:11.724984Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:11.725010Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:11.725031Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:11.725176Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10924 TClient is connected to server localhost:10924 WaitRootIsUp 'Root'... 
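Aside: the PRECONDITION_FAILED above comes from CREATE OBJECT translating into an INSERT into `//Root/.metadata/secrets/values`, which conflicts with the row already written by the earlier UPSERT of the same secret. The distinction is analogous to std::map::emplace versus insert_or_assign; a small standalone sketch of that analogy follows, not the KQP implementation.

#include <iostream>
#include <map>
#include <string>

int main() {
    std::map<std::string, std::string> secrets;

    // UPSERT OBJECT my_secret_2 ... WITH value = "edcba";
    // UPSERT semantics: write whether or not the key already exists.
    secrets.insert_or_assign("my_secret_2", "edcba");

    // CREATE OBJECT my_secret_2 ... WITH (value="qwerty");
    // INSERT semantics: fails if the key already exists, which is what the
    // "Conflict with existing key." error above reports.
    auto [it, inserted] = secrets.emplace("my_secret_2", "qwerty");
    if (!inserted) {
        std::cout << "Conflict with existing key: " << it->first
                  << " keeps value '" << it->second << "'\n";
    }
}
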
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:12.260599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:12.276692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:12.336040Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:12.559154Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:12.639640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:15.040636Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875131648678779:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:15.040743Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:15.065690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:15.099501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:15.133994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:15.164125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:15.202380Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:15.242030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:15.321934Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875131648679280:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:15.322056Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:15.322267Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875131648679285:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:15.326558Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:15.385103Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875131648679287:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:15:16.547834Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875114468807900:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:16.547903Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:16.721478Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmYzM2U0OGQtZTIzNDVjNDctYzhjOTcyMmQtNDA5OTIyNWY=, ActorId: [3:7439875135943646901:2460], ActorState: ExecuteState, TraceId: 01jd8g4ajmbtgw0zjx9j8yb248, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:15:16.235414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:16.235543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.235583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:16.235637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:16.235702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:16.235736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:16.235791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.236154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:16.305548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:16.305614Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:16.315522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:16.315614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:16.315800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:16.328941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:16.329164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:16.330124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.330340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2024-11-21T23:15:16.333470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.334897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.334964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.335196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:16.335244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.335286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:16.335374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.341572Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:16.457094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.457351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.457549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.457779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.457849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.460454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.460645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:16.460931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.461032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:16.461093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:16.461131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:16.464684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.464745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:16.464779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 3 -> 128 2024-11-21T23:15:16.466658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.466709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.466750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.466804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.470576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:16.472369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:16.472593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:16.473677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.473801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.473857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.474118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:16.474171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.474336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.474463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:16.476123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.476163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.476300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.476337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:16.476579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2024-11-21T23:15:16.476625Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:16.476692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:16.476714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.476746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:16.476786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.476812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:16.476832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:16.476870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:16.476907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:16.476928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:16.478375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.478533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.478576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:16.478629Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:16.478694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.478811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:16.481594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:16.482157Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.483623Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.498864Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T23:15:16.499095Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T23:15:16.499445Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23234, port: 23234 2024-11-21T23:15:16.500310Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T23:15:16.508193Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=user1, attributes: 1.1 2024-11-21T23:15:16.554693Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: uid=user1,dc=search,dc=yandex,dc=net 2024-11-21T23:15:16.555999Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success 
connect from tablet# 72057594046447617 2024-11-21T23:15:16.559440Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:15:16.560228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T23:15:16.560280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T23:15:16.644395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjc0MTE2LCJleHRlcm5hbF9hdXRoZW50aWNhdGlvbiI6ImxkYXAiLCJpYXQiOjE3MzIyMzA5MTYsInN1YiI6InVzZXIxIn0.VO4b2roekWULnAaiyPy6DYPjt-MOZGFpheEgDCBhlg-sSpOpB9GSSn3vWSyHljFl8MoseCLUZTrZ_iCWvjpQEWjq6imWfkim2t1qPZI0GViFkMAcm5aUrJMFFIuLtytfsyJHyMdqOE6uRrhOIo_gDlwD1OHLkvdTA7LU4pjYhYdIYnHwmsMzcoP9e2IBJfgNcYhRGCAmnUwmTJQ8s9qQujrcpp6fXtX3dNkpvLubE66E4r_sCo0iZUtGGWby1GRdtuwULu6JiT5eOuGsjnrXfRFlRAoCX1I-y-9rezwO-mcdhn-jUrNSu5m-ZEudYrrABFamO65net1394zGF3CdBQ", at schemeshard: 72057594046678944 2024-11-21T23:15:16.645335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.645402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.645587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.645632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T23:15:16.647066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(2): 2024-11-21T23:15:16.460600Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:15:16.645006Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T23:15:16.645006Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1@ldap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:15:16.331399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:16.331506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.331541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:16.331598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:16.331652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:16.331679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:16.331749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.332091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:16.406303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:16.406365Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:16.417207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:16.417306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:16.417504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:16.429651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:16.429884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:16.430584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.430799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:16.433636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.435015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.435083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.435299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:16.435351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.435397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:16.435493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.441951Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:16.571782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.572053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.572248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.572474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.572550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.574984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.575193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:16.575481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.575583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:16.575640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:16.575692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:16.577626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.577688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:16.577722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:16.579498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.579546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.579592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.579647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.583654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:16.585854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:16.586073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:16.587225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.587368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { 
RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.587425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.587751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:16.587817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.588026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.588138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:16.590365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.590434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.590687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.590732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:16.591064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.591105Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:16.591203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:16.591232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.591278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:16.591342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.591381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:16.591409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:16.591491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:16.591530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:16.591563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:16.593550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.593652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.593701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at 
schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:16.593771Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:16.593829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.593935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:16.597234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:16.597775Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:15:16.598349Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.612886Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:272:2264]) 2024-11-21T23:15:16.615657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.621294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.621455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:15:16.621488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:16.621555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.621607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:15:16.621643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:16.621681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:15:16.621722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T23:15:16.621763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T23:15:16.622503Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:15:16.625619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.625823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T23:15:16.626053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.626089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.626216Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.626251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:15:16.627127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:16.627229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:16.627258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:16.627286Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T23:15:16.627376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.627477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:15:16.627721Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:15:16.629499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T23:15:16.629970Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T23:15:16.632210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T23:15:16.632257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T23:15:16.713467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjc0MTE2LCJpYXQiOjE3MzIyMzA5MTYsInN1YiI6InVzZXIxIn0.5AqvAOO0LkHn5puVyGSGt_UkCNCArlRAcQPXfYQIQcqTMBTFss2lXUjQh8pELcHmvC6oXe258cNyvC5OImXU_-Et31B9Kk0PsuzNz6mHnlUsxnOnvim5_LXLaMBR_cognWrs5x7CrmKJa6IbhQN84lYr8lZwfKm-vRHL7W5PfSw2dYgFugFskWugjY0I9aFgITqpuKT_T06MO7fa_6xp1NnLClY2o3qW6J0xdsAFWvJv3Ovd95SHv1lvdZDNCF93QFiEEWyzlGld_noJDhumHhJ0ceH56Iqp0RfGoCpDKPagoDBtCClDDtb1V-0ZRvklh_9I5gxhqel8b7JLp-4Rsw", at schemeshard: 72057594046678944 2024-11-21T23:15:16.713765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.713822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.714032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.714091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T23:15:16.715999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 
2024-11-21T23:15:16.716740Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:16.716984Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 210us result status StatusSuccess 2024-11-21T23:15:16.717471Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA84+Ckg8K1xwxSzbyLdDK\nPjLi+5xrXYsDCMvuwmt5iB4u0/jAPQ/bM0GRncS76D6g4vfql6xmtun/Boq57JDm\nmy5ErdsVpr30coHJ3tsZwOQaAsJQmLIPL+CH21aBlHkXyYO17T6LT3AF0ooO056m\nh08psZHPDz8JKTX+rueCGV61HTsVUh6NRP8u7D7Wp+IIjcb2uzyueEyr0sMwO9cX\nhJHzDB6nyPWyThYjAM8HatLxz99cxW+Q+H+xS0uzEM/tAL4GEWoGZvPB5AC2sT+4\nGMYZWhdM1FfPTlckbkQzi5qkQqGcHByhaxq9ylYzpJfDL9vebpSOxV9CHlNYEONC\n+QIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732317316701 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.718030Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout 2024-11-21T23:15:16.718090Z node 1 :HTTP ERROR: Logout: No ydb_session_id cookie 2024-11-21T23:15:16.718448Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout 2024-11-21T23:15:16.719260Z node 1 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format 2024-11-21T23:15:16.719335Z node 1 :HTTP ERROR: Logout: Token is not in correct format 2024-11-21T23:15:16.719837Z node 1 :HTTP WARN: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2024-11-21T23:15:16.575138Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:15:16.625769Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T23:15:16.715033Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 
2024-11-21T23:15:16.720806Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJh****4Rsw (113EC936), operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2024-11-21T23:15:16.720806Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJh****4Rsw (113EC936), operation=LOGOUT, status=SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:15:15.911367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:15.911506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:15.911552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:15.911609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:15.911677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:15.911716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:15.911768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:15.912121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:15.989213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:15.989273Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:16.000047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:16.000142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:16.000320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:16.012983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:16.013229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:16.014073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.014294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:16.017345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.018719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.018795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.019021Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:16.019066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.019122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:16.019221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.026012Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:16.167639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.167928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.168158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.168425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.168512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.174054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.174302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:16.174601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.174697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:16.174759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:16.174798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:16.183124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.183203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:16.183253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:16.187437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.187516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.187564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.187628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.200639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:16.202210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:16.202376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:16.203200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.203312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.203356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.203573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:16.203614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.203742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.203824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:16.205747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.205797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.205984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.206047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:16.206356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.206419Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:16.206518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:16.206552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.206603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:16.206662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.206703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:16.206733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:16.206826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:16.206868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:16.206904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:16.208621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.208741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.208786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:16.208848Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:16.208905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.209020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:16.212080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:16.212586Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.213994Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.229187Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T23:15:16.229411Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T23:15:16.229720Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:19052, port: 19052 2024-11-21T23:15:16.230457Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T23:15:16.237105Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=user1, attributes: 1.1 2024-11-21T23:15:16.282639Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: uid=user1,dc=search,dc=yandex,dc=net 2024-11-21T23:15:16.283065Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:19052. 
Invalid credentials 2024-11-21T23:15:16.284177Z node 1 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2024-11-21T23:15:16.284754Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:15:16.287647Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T23:15:16.174216Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:15:16.284012Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:19052. Invalid credentials, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T23:15:16.284012Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP login failed for user uid=user1,dc=search,dc=yandex,dc=net on server ldap://localhost:19052. Invalid credentials, login_user=user1@ldap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:15:15.727006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:15.727081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:15.727112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:15.727167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:15.727214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:15.727240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:15.727285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:15.727619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:15.805073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:15.805143Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:15.815701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:15.815796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:15.815960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:15.827804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:15.828009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:15.828648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:15.828896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:15.833075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:15.834363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:15.834447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:15.834667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:15.834719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:15.834764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:15.834850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:15.841095Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:15.954756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:15.955006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:15.955193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:15.955412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:15.955500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:15.957943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:15.958144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:15.958414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:15.958512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:15.958552Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:15.958591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:15.960565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:15.960624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:15.960668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:15.962278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:15.962329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:15.962371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:15.962450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:15.965991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:15.967858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:15.968052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:15.969095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:15.969235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:15.969298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:15.969576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:15.969630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:15.969807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:15.969898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:15.971910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:15.971955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:15.972140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:15.972186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:15.972508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:15.972551Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:15.972653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:15.972688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:15.972738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:15.972796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:15.972840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:15.972869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:15.972934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:15.972984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:15.973023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:15.982265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:15.982543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:15.982602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:15.982661Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:15.982721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:15.982886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:15.986352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:15.986890Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:15.988365Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.004155Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T23:15:16.004394Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T23:15:16.004707Z 
node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:11336, port: 11336 2024-11-21T23:15:16.005485Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T23:15:16.028059Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:11336. Invalid credentials 2024-11-21T23:15:16.029147Z node 1 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2024-11-21T23:15:16.029625Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:15:16.031994Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T23:15:15.958096Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:15:16.029007Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:11336. Invalid credentials, login_user=user1@ldap AUDIT LOG checked line: 2024-11-21T23:15:16.029007Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:11336. Invalid credentials, login_user=user1@ldap >> TInterconnectTest::TestAddressResolve >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> TInterconnectTest::TestPingPongThroughSubChannel >> TInterconnectTest::TestTraceIdPassThrough >> TInterconnectTest::OldNbs [GOOD] |84.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BasicLogin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:16.004602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:16.004679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.004709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:16.004750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:16.004786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:16.004814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:16.004881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.005247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:16.082808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:16.082879Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:16.111970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:16.119865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:16.120137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:16.144472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:16.150604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:16.151358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.151806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:16.156949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.158427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.158499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.158764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:16.158840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.158900Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:16.159015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.179222Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:16.310202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.310466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.310660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.310865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.310921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.313048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.313199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:16.313399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.313461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:16.313514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:16.313548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:16.315059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.315103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:16.315129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:16.316622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.316681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.316720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.316770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.320427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 
} ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:16.321959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:16.322168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:16.322957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.323076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.323125Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.323322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:16.323371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.323528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.323596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:16.325015Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.325044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.325192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.325228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:16.325534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.325569Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:16.325645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:16.325669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.325696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:16.325721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.325766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:16.325795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T23:15:16.325840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:16.325864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:16.325889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:16.327363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.327571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.327625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:16.327682Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:16.327710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.327787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:16.330229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:16.330684Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:15:16.331147Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.344733Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:15:16.346940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.351524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.351658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:15:16.351693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:16.351756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.351810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:15:16.351845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:16.351879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:15:16.351926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T23:15:16.351961Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T23:15:16.353050Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:15:16.360818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.360970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T23:15:16.361317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.361365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.361526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.361575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:15:16.362282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:16.362423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:16.362464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:16.362507Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T23:15:16.362544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.362645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:15:16.362814Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:15:16.366598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T23:15:16.366972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T23:15:16.367023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T23:15:16.401386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjc0MTE2LCJpYXQiOjE3MzIyMzA5MTYsInN1YiI6InVzZXIxIn0.1cOq7g7C2nBZMsj9JrrhqMfbHKcv2rxKbb7GiyK_PESVM6xKcH9GDzBAk64qvEqYAma9fuqtdAqB0FVjQqdkATLzCWSNrJ3w75YSMhav-_GrlgaX9naMb-rsJ80C4WfM3bPkRst6PUlRXsx4TvFkr-jHAW0CffKIbGr4c7PNry2Z1NslqjdGI5KC2KUFgZQxYGlUwHsRyqMJke0oGtqNVwnkO23EAiklPyeWQA4TqjAZIHrsQrFt4UYCgCvEKhOS0i3479N6SfNj64yIHdiOwpbuRoQjDPVSw_C8Jqph3nx-dX9036f71IVz7xwlipWBNhn4nIQjI5X9qDAOiJD5Lw", at schemeshard: 72057594046678944 2024-11-21T23:15:16.401607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.401639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.401781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.401809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T23:15:16.402160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2024-11-21T23:15:16.402426Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:16.402586Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 173us result status StatusSuccess 2024-11-21T23:15:16.402993Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4Qhak0un84Q6i7dCe+am\nkJ+aT+YuaotPAPylurs102C+k6Nen/Oe0nGTX5pnX5eIjzE6oIeuGLVGHiweG95h\nhHzfYhVZalKv8v81YfpMVl1vhEaq865hIZab3zHbGAmtv4TeqOU7+NfPCwVwo2bL\nOrcKds6ThPmheFZQq7mz8d8vxXJkRNcoepSJ4jgJR+Ge0eGbcvVPK1YaQdPKiJgK\nu/N1wjgO1z9EdYv/2ZyHqa9VMSy/ExhFbAhYKVahZi5v6pgueKK9zryQj91f9dV8\nCX+TSLD5bnBAzDiAKrDEDzMTGLsRl79DnwBGo0HZPBMk/Hbt2FEt5bdnK4FuPFYP\nnQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1732317316389 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TInterconnectTest::TestBlobEventsThroughSubChannels >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:15:16.486829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:16.486916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.486951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:16.487023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:16.487087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:16.487125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:16.487181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.487536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:16.557531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:16.557589Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:16.567050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:16.567150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:16.567347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:16.578430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:16.578650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:16.579243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.579479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 2024-11-21T23:15:16.582014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.583264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.583313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.583478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:16.583521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.583549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:16.583616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.594211Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:16.704142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.704388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.704586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.704791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.704853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.706966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.707182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:16.707421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.707529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:16.707572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:16.707604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:16.709807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.709866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:16.709901Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:16.714438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.714502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.714551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.714611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.718233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:16.720687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:16.720891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:16.721982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.722134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.722219Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.722532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:16.722599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.722800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.722940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:16.724980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.725024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.725223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.725266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:16.725579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2024-11-21T23:15:16.725621Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:16.725712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:16.725741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.725789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:16.725843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.725880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:16.725905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:16.725968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:16.726018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:16.726049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:16.727917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.728023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.728066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:16.728220Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:16.728270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.728356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:16.731601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:16.731955Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.733020Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.746064Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:274:2266]) 2024-11-21T23:15:16.746247Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T23:15:16.746604Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:25230, port: 25230 2024-11-21T23:15:16.747421Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2024-11-21T23:15:16.760549Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=bad_user, attributes: 1.1 2024-11-21T23:15:16.760920Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user bad_user does not exist. 
LDAP search for filter uid=bad_user on server ldap://localhost:25230 return no entries 2024-11-21T23:15:16.761736Z node 1 :HTTP ERROR: Login fail for bad_user@ldap: Could not login via LDAP 2024-11-21T23:15:16.762132Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:15:16.764565Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2024-11-21T23:15:16.707137Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:15:16.761624Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP user bad_user does not exist. LDAP search for filter uid=bad_user on server ldap://localhost:25230 return no entries, login_user=bad_user@ldap AUDIT LOG checked line: 2024-11-21T23:15:16.761624Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: LDAP user bad_user does not exist. LDAP search for filter uid=bad_user on server ldap://localhost:25230 return no entries, login_user=bad_user@ldap |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 12149, MsgBus: 30070 2024-11-21T23:14:31.307301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874941577165835:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:31.307556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011bd/r3tmp/tmpZdpHU8/pdisk_1.dat 2024-11-21T23:14:32.096735Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:32.106283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:32.106415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:32.108460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12149, node 1 2024-11-21T23:14:32.276918Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:32.276952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:32.276959Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:32.277080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30070 TClient is connected to server localhost:30070 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:32.898072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:32.913279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:32.931447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:33.083857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:33.700167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:34.194254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:36.306825Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874941577165835:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:36.306939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:39.508080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874975936905787:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:39.508232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.173735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.268546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.328346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.359271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.395431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.497971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.667554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874980231873600:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.667617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.673090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874980231873605:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.684084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:14:40.704238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874980231873607:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:14:42.726357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:14:42.754202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20084, MsgBus: 7597 2024-11-21T23:14:43.659666Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874991647475871:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:43.661675Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011bd/r3tmp/tmpPECvX2/pdisk_1.dat 2024-11-21T23:14:43.851652Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:43.870353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:43.870673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:43.872786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20084, node 2 2024-11-21T23:14:43.932878Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:43.932910Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:43.932919Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:43.933032Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7597 TClient is connected to server localhost:7597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:44.404264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:44.411214Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:14:44.425176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:14:44.495576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:14:44.678185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, sub ... 1474976715671:1, at schemeshard: 72057594046644480 2024-11-21T23:14:51.686638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T23:14:52.519802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2024-11-21T23:14:53.423379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T23:14:54.353647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T23:14:55.064775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T23:14:55.891439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T23:14:55.960577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T23:14:58.613480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T23:14:58.622702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T23:14:58.822593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:14:58.822621Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 5851, MsgBus: 8819 2024-11-21T23:14:59.665643Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875060360538337:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:59.665692Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath 
# /home/runner/.ya/build/build_root/dl5p/0011bd/r3tmp/tmpk22CJo/pdisk_1.dat 2024-11-21T23:14:59.823074Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:59.837973Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:59.838062Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:59.841805Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5851, node 3 2024-11-21T23:14:59.901263Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:59.901286Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:59.901291Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:59.901383Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8819 TClient is connected to server localhost:8819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:00.455973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:00.468649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:00.548313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:00.747168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:00.830816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:03.578451Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875077540409216:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:03.578544Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:03.610097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:03.641534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:03.687747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:03.726677Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:03.761863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:03.843366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:03.885952Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875077540409718:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:03.886029Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:03.886123Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875077540409723:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:03.889914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:03.901398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875077540409725:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:15:04.680540Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875060360538337:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:04.680911Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:05.054281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T23:15:05.550499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:15:06.068636Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2024-11-21T23:15:06.525314Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T23:15:07.040287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T23:15:07.550568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T23:15:08.064510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T23:15:08.092077Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T23:15:11.754345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715732:0, at schemeshard: 72057594046644480 |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:15:16.214477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single 
parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:16.214544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.214574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:16.214625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:16.214676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:16.214699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:16.214746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.214997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:16.286170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:16.286235Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:16.295167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:16.295307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:16.295522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:16.307856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:16.308078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:16.308853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.309085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:16.312331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.313649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.313720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.313937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:16.313984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.314028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:16.314108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.321026Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:16.418943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.419145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.419290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.419442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.419513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.425718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.425906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:16.426163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.426246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:16.426298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:16.426339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:16.428083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.428126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:16.428155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:16.429692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.429740Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.429784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.429841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.433058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:16.434531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:16.434703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:16.435550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.435668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.435722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.435923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:16.435961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.436126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.436225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:16.437966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.438017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.438213Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.438257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:16.438581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.438632Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:16.438730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:16.438765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.438816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:16.438885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.438925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:16.438957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:16.439026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:16.439066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:16.439099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:16.440816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.440904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.440940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:16.441003Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:16.441055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.441138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:16.444020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:16.444607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:15:16.445106Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.459279Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:15:16.461609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.465876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.465997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:15:16.466030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:16.466108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.466150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:15:16.466188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:16.466214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:15:16.466242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T23:15:16.466274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T23:15:16.467104Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:15:16.470257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.470499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE 
USER, path: /MyRoot 2024-11-21T23:15:16.470925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.470968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.471109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.471146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:15:16.471727Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:15:16.471871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:16.471956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:16.471991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:16.472031Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T23:15:16.472107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.472204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:15:16.473753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T23:15:16.474159Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T23:15:16.476045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T23:15:16.476103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T23:15:16.619481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzMyMjc0MTE2LCJpYXQiOjE3MzIyMzA5MTYsInN1YiI6InVzZXIxIn0.S0Vc4Roqj3r4RAyCYpAE6LGT4Ufdgs5hcxR6KOtMa_EXtFS1O5GW4_3-Z6qW72tdSY76iD_ASUeoBrgq73m3xsLKJCVzwomg1iwTKjqA5FNHOFjLHXrweFlS9J8qIKZjdbqukeeG3mLjhQ0X10QQ0Oo278cXZ7c5PJ3hrqlOqCM7KYKyjjSiMslGGus__d4hwajGgepqLDPS7BxqOY0X78bLm84e-cagFmnsUA7_4xX6JEak3to2vBjhPtuD-PbEKokypzbveS7EvOxRFSsddZHGFyCMyeKPVWJf98khV8iNub9BR_W2DdpmRfI-TQ_lmJm1IRGipcxj4BePIf9jhg", at schemeshard: 72057594046678944 2024-11-21T23:15:16.619759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.619810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.620007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2024-11-21T23:15:16.620066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T23:15:16.621784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2024-11-21T23:15:16.425858Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:15:16.470429Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T23:15:16.620862Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 AUDIT LOG checked line: 2024-11-21T23:15:16.620862Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLoginBadPassword [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:125:2058] recipient: [1:107:2139] 2024-11-21T23:15:16.242723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:16.242812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.242878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:16.242936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:16.242989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:16.243036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:16.243111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:16.243478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:16.310330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:16.310406Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:16.320956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:16.321047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:16.321217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:16.333765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2024-11-21T23:15:16.333982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:16.334679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.334932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:16.338180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.339608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.339670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.339869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:16.339919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.339961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:16.340066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.345304Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:16.473303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.473493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.473625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.473793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.473837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.475680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.475866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:16.476069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.476168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:16.476206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:16.476251Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:16.477773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.477820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:16.477854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:16.479423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.479472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.479511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.479553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.482731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:16.484478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:16.484669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:16.486008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:16.486128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.486203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.486488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:16.486546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:16.486727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.486871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:16.488792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.488841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.489063Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.489112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:16.489519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:16.489564Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:16.489660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:16.489696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.489744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:16.489804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:16.489841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:16.489869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:16.489930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:16.489977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:16.490016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:16.492053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.492158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:16.492201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:16.492257Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:16.492305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.492392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:15:16.495314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:15:16.495823Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:15:16.496301Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:15:16.512085Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:15:16.517193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: 
"password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:16.522097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:16.522261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:15:16.522299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:16.522386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:16.522467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:15:16.522516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:16.522554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:15:16.522588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2024-11-21T23:15:16.522625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T23:15:16.523543Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:15:16.526977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:16.527193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2024-11-21T23:15:16.527539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.527582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.527731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.527777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:15:16.528372Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:15:16.528524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:16.528614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:16.528656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:16.528696Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T23:15:16.528784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:16.528891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:15:16.530758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2024-11-21T23:15:16.531211Z node 1 :HTTP WARN: 127.0.0.1:0 POST /login 2024-11-21T23:15:16.533240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoExecute at schemeshard: 72057594046678944 2024-11-21T23:15:16.533288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2024-11-21T23:15:16.579756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin DoComplete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2024-11-21T23:15:16.579950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:16.579991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:16.580181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:16.580251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 0, path id: 1 2024-11-21T23:15:16.581487Z node 1 :HTTP ERROR: Login fail for user1: Invalid password 2024-11-21T23:15:16.582036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2024-11-21T23:15:16.475810Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2024-11-21T23:15:16.527137Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user=user1 2024-11-21T23:15:16.581258Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Invalid password, login_user=user1 AUDIT LOG checked line: 2024-11-21T23:15:16.581258Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Invalid password, login_user=user1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2024-11-21T23:13:28.966431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T23:13:28.966517Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful 
handshake: owner# 800, generation# 1 2024-11-21T23:13:28.966745Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:32:2064] 2024-11-21T23:13:28.966788Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T23:13:28.966869Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:33:2065] 2024-11-21T23:13:28.966915Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1 2024-11-21T23:13:28.967174Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:33:2065] 2024-11-21T23:13:28.967216Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2024-11-21T23:13:28.967320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:28.967877Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:39:2067] 2024-11-21T23:13:28.967920Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant 2024-11-21T23:13:28.968032Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:39:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:13:28.968210Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:40:2067] 2024-11-21T23:13:28.968241Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/tenant 2024-11-21T23:13:28.968285Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:40:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:13:28.968401Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2067] 2024-11-21T23:13:28.968428Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/tenant 2024-11-21T23:13:28.968462Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:41:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:13:28.968542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2024-11-21T23:13:28.968626Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:39:2067] 2024-11-21T23:13:28.968703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2024-11-21T23:13:28.968743Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:40:2067] 2024-11-21T23:13:28.968778Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2024-11-21T23:13:28.968831Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2067] 2024-11-21T23:13:28.968914Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:36:2067] 2024-11-21T23:13:28.968996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:37:2067] 2024-11-21T23:13:28.969045Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/tenant] Set up state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:28.969132Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:38:2067] 2024-11-21T23:13:28.969187Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2067][/root/tenant] Ignore empty state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2024-11-21T23:13:28.969388Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:32:2064], cookie# 0, event size# 103 2024-11-21T23:13:28.969435Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2024-11-21T23:13:28.969515Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2024-11-21T23:13:28.969677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2024-11-21T23:13:28.969747Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:39:2067] 2024-11-21T23:13:28.969811Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:36:2067] 2024-11-21T23:13:28.969874Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2067][/root/tenant] Update to strong state: owner# [1:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2024-11-21T23:13:29.443264Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:32:2064] 2024-11-21T23:13:29.443329Z node 3 :SCHEME_BOARD_REPLICA NOTICE: 
[3:3:2050] Successful handshake: owner# 800, generation# 1 2024-11-21T23:13:29.443472Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:33:2065] 2024-11-21T23:13:29.443507Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2024-11-21T23:13:29.443576Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:32:2064] 2024-11-21T23:13:29.443606Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Commit generation: owner# 800, generation# 1 2024-11-21T23:13:29.443804Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:33:2065] 2024-11-21T23:13:29.443834Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2024-11-21T23:13:29.443951Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:13:29.444359Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:39:2067] 2024-11-21T23:13:29.444403Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Upsert description: path# /root/tenant 2024-11-21T23:13:29.444491Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Subscribe: subscriber# [3:39:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:13:29.444646Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:40:2067] 2024-11-21T23:13:29.444671Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/tenant 2024-11-21T23:13:29.444717Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:40:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:13:29.444884Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2067] 2024-11-21T23:13:29.444914Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Upsert description: path# /root/tenant 2024-11-21T23:13:29.444957Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Subscribe: subscriber# [3:41:2067], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:13:29.445018Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2024-11-21T23:13:29.445097Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:39:2067] 2024-11-21T23:13:29.445146Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2024-11-21T23:13:29.445201Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:40:2067] 2024-11-21T23:13:29.445297Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:41:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:9:2056] 2024-11-21T23:13:29.445346Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2067] 
2024-11-21T23:13:29.445437Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:36:2067] 2024-11-21T23:13:29.445512Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:37:2067] 2024-11-21T23:13:29.445560Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:35:2067][/root/tenant] Set up state: owner# [3:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:13:29.445611Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:35:2067][/ ... omainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2024-11-21T23:15:14.613484Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:32:2064] 2024-11-21T23:15:14.613550Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Successful handshake: owner# 910, generation# 1 2024-11-21T23:15:14.613671Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:32:2064] 2024-11-21T23:15:14.613700Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Commit generation: owner# 910, generation# 1 2024-11-21T23:15:14.613746Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:33:2065] 2024-11-21T23:15:14.613776Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Successful handshake: owner# 910, generation# 1 2024-11-21T23:15:14.613956Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:33:2065] 2024-11-21T23:15:14.613984Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Commit generation: owner# 910, generation# 1 2024-11-21T23:15:14.614060Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:15:14.614490Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:39:2067] 2024-11-21T23:15:14.614524Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T23:15:14.614606Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Subscribe: subscriber# [397:39:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:15:14.614755Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:40:2067] 2024-11-21T23:15:14.614781Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T23:15:14.614820Z 
node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Subscribe: subscriber# [397:40:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:15:14.614953Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2067] 2024-11-21T23:15:14.614979Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T23:15:14.615013Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Subscribe: subscriber# [397:41:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:15:14.615073Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:39:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2024-11-21T23:15:14.615121Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:39:2067] 2024-11-21T23:15:14.615166Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:40:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2024-11-21T23:15:14.615201Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:40:2067] 2024-11-21T23:15:14.615235Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:41:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2024-11-21T23:15:14.615270Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2067] 2024-11-21T23:15:14.615341Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:36:2067] 2024-11-21T23:15:14.615412Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:37:2067] 2024-11-21T23:15:14.615471Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][397:35:2067][/Root/Tenant/table_inside] Set up state: owner# [397:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:15:14.615523Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2067] 2024-11-21T23:15:14.615561Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: [main][397:35:2067][/Root/Tenant/table_inside] Ignore empty state: owner# [397:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 
9 PathOwnerId: 910 2024-11-21T23:15:15.090122Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:32:2064] 2024-11-21T23:15:15.090175Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Successful handshake: owner# 910, generation# 1 2024-11-21T23:15:15.090273Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:32:2064] 2024-11-21T23:15:15.090301Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Commit generation: owner# 910, generation# 1 2024-11-21T23:15:15.090339Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:33:2065] 2024-11-21T23:15:15.090363Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Successful handshake: owner# 910, generation# 1 2024-11-21T23:15:15.090599Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:33:2065] 2024-11-21T23:15:15.090641Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Commit generation: owner# 910, generation# 1 2024-11-21T23:15:15.090744Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:15:15.091277Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:39:2067] 2024-11-21T23:15:15.091314Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T23:15:15.091403Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Subscribe: subscriber# [399:39:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:15:15.091625Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:40:2067] 2024-11-21T23:15:15.091660Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T23:15:15.091714Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Subscribe: subscriber# [399:40:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:15:15.091888Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2067] 2024-11-21T23:15:15.091918Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2024-11-21T23:15:15.091966Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Subscribe: subscriber# [399:41:2067], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2024-11-21T23:15:15.092038Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:39:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2024-11-21T23:15:15.092097Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:39:2067] 2024-11-21T23:15:15.092151Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:40:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 
}: sender# [399:6:2053] 2024-11-21T23:15:15.092203Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:40:2067] 2024-11-21T23:15:15.092250Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:41:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2024-11-21T23:15:15.092297Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2067] 2024-11-21T23:15:15.092384Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:36:2067] 2024-11-21T23:15:15.092473Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:37:2067] 2024-11-21T23:15:15.092528Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][399:35:2067][/Root/Tenant/table_inside] Set up state: owner# [399:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:15:15.092590Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:35:2067][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2067] 2024-11-21T23:15:15.092642Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: [main][399:35:2067][/Root/Tenant/table_inside] Ignore empty state: owner# [399:34:2066], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12319, MsgBus: 18983 2024-11-21T23:14:15.405994Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874874259530550:2159];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:15.406582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fac/r3tmp/tmpaCGJFP/pdisk_1.dat 2024-11-21T23:14:16.027625Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:16.075415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:16.075532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:16.082853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12319, node 1 2024-11-21T23:14:16.526972Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:16.526996Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:16.527003Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:16.527100Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:18983 TClient is connected to server localhost:18983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:17.124197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:14:17.183107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:14:17.425412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:17.584737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:17.666686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:19.627275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874891439401342:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:19.627404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:20.219807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:20.341109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:20.402524Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874874259530550:2159];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:20.402774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:20.419069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:14:20.522737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:14:20.598585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:14:20.743979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:14:20.931918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874895734369153:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:20.932006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:20.932445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874895734369158:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:20.937702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:14:20.986680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874895734369160:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:14:31.023221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:14:31.023269Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 7327, MsgBus: 31959 2024-11-21T23:14:37.477664Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874967472836363:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:37.477770Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fac/r3tmp/tmpp5M92S/pdisk_1.dat 2024-11-21T23:14:37.607663Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:37.628357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:37.628428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:37.631853Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7327, node 2 2024-11-21T23:14:37.766063Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:37.766086Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:37.766093Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:37.766179Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31959 TClient is connected to server localhost:31959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:38.191352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:38.205965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:38.282935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:38.469647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:38.579418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:41.006700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874980357739947:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you ... 72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:14:52.653999Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:14:52.654036Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:14:52.654209Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:14:52.654238Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:14:52.654483Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:14:52.654517Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:14:52.654745Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:14:52.654774Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:14:52.654874Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:14:52.654899Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:14:52.891791Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:14:52.891873Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:14:52.891975Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:14:52.892036Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:14:52.892240Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:14:52.892270Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:14:52.892366Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:14:52.892413Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:14:52.892486Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:14:52.892512Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:14:52.892555Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:14:52.892599Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:14:52.892985Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:14:52.893028Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:14:52.893209Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:14:52.893236Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:14:52.893399Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:14:52.893449Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:14:52.893658Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:14:52.893690Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:14:52.893789Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 
2024-11-21T23:14:52.893823Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:14:52.911100Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:14:52.911169Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:14:52.911280Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:14:52.911326Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:14:52.911566Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:14:52.911605Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:14:52.911715Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:14:52.911754Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:14:52.911816Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:14:52.911840Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:14:52.911882Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:14:52.911913Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:14:52.912313Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:14:52.912383Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:14:52.912617Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:14:52.912648Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:14:52.912771Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:14:52.912796Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:14:52.913001Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:14:52.913031Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:14:52.913175Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:14:52.913217Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 7261, MsgBus: 27218 2024-11-21T23:14:31.978046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874941552519676:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:31.979104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011cf/r3tmp/tmp7N1GjA/pdisk_1.dat 2024-11-21T23:14:32.663443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:32.663580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:32.668690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:32.739270Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7261, node 1 2024-11-21T23:14:33.001951Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:33.001978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:33.001985Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:33.002059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27218 TClient is connected to server localhost:27218 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:35.514712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:36.974700Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874941552519676:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:36.974834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:39.561250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874975912258601:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:39.562786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.358175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.620327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.677982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.749171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:40.843996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874980207226212:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.844106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.844545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874980207226217:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.850327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2024-11-21T23:14:40.886363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874980207226219:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } Trying to start YDB, gRPC: 14408, MsgBus: 13591 2024-11-21T23:14:43.664419Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874995555632891:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:43.664779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011cf/r3tmp/tmpru5cdM/pdisk_1.dat 2024-11-21T23:14:43.957766Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:44.008389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:44.008496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:44.015808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14408, node 2 2024-11-21T23:14:44.178917Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:44.178937Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:44.178943Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:44.179032Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13591 TClient is connected to server localhost:13591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:45.100907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:48.651563Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874995555632891:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:48.655223Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:48.934244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875017030469919:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:48.934562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:48.958067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:14:49.037487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:14:49.149171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:49.198815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:49.267022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875021325437522:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:49.267179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:49.273009Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875021325437527:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:49.276821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2024-11-21T23:14:49.335757Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2024-11-21T23:14:49.336077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875021325437529:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking }
: Info: Success, code: 4 2024-11-21T23:14:49.895802Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found Trying to start YDB, gRPC: 17251, MsgBus: 14424 2024-11-21T23:14:51.160862Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875026607971971:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:51.160899Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0011cf/r3tmp/tmpvMc93I/pdisk_1.dat 2024-11-21T23:14:51.456086Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:51.456204Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:51.458829Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:51.479589Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17251, node 3 2024-11-21T23:14:51.621190Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:51.621214Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:51.621222Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:51.621307Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14424 TClient is connected to server localhost:14424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:52.522103Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:52.543780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [FAIL] Test command err: Trying to start YDB, gRPC: 5697, MsgBus: 31853 2024-11-21T23:14:19.130915Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874891476936629:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:19.131440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fa0/r3tmp/tmpLw6lFr/pdisk_1.dat 2024-11-21T23:14:20.075817Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:20.233326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:20.233409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:20.234226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:14:20.243414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5697, node 1 2024-11-21T23:14:20.455094Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:20.455119Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:20.455130Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:20.455237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31853 TClient is connected to server localhost:31853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:22.061544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:22.127465Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:24.124955Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874891476936629:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:24.156972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:27.564489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874925836675501:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:27.564656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:28.431033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:14:28.664412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:14:28.670368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:14:28.670673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:14:28.670782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:14:28.670896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:14:28.670996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:14:28.671110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:14:28.671209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:14:28.671306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:14:28.671401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:14:28.671520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:14:28.671618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7439874930131642978:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T23:14:28.681679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:14:28.682476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:14:28.682738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:14:28.682835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:14:28.682925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:14:28.683038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:14:28.683124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:14:28.683211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:14:28.683298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:14:28.683380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:14:28.683459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:14:28.683559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7439874930131642953:2318];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:14:28.711601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:14:28.711690Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:14:28.711810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:14:28.711837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:14:28.712051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished ... ponse: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:36.586890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:36.597453Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:38.072855Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439874949911348923:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:38.072927Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:42.935595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874988566055255:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:42.935684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:42.972293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:14:43.245723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:14:43.516836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874992861023877:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:43.520287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874992861023873:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:43.520441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:43.521417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T23:14:43.532664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874992861023879:2428], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } Trying to start YDB, gRPC: 29840, MsgBus: 25762 2024-11-21T23:14:47.308192Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875008857277669:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:47.308248Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fa0/r3tmp/tmpqMgdzO/pdisk_1.dat 2024-11-21T23:14:47.402292Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:47.421297Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:47.421380Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:47.423120Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29840, node 3 2024-11-21T23:14:47.482049Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:47.482087Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:47.482097Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:47.482266Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25762 TClient is connected to server localhost:25762 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:48.066846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:52.001571Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875026037147472:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:52.001676Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:52.046061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:14:52.274709Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:14:52.364061Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875008857277669:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:52.364490Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:52.507868Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875030332116071:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:52.507972Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:52.508415Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875030332116076:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:52.513866Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T23:14:52.533795Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875030332116078:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T23:14:55.559018Z node 3 :RPC_REQUEST WARN: Client lost 2024-11-21T23:15:02.389183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:02.389223Z node 3 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:4329, virtual void NKikimr::NKqp::NTestSuiteKqpQueryService::TTestCaseTableSink_Oltp_Replace::Execute_(NUnitTest::TTestContext &) [UseSink = false]: (prepareResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1A9F1CBF 1. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:4329: Execute_ @ 0x1A0A2F1A 2. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24: operator() @ 0x19FF3B47 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24:1) &> @ 0x19FF3B47 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24:1) &> @ 0x19FF3B47 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x19FF3B47 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x19FF3B47 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x1AA30BB8 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x1AA30BB8 9. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x1AA30BB8 10. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1A9F8828 11. /-S/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp:24: Execute @ 0x19FF2D13 12. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1A9FA0F5 13. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1AA2A7FC 14. ??:0: ?? @ 0x7FAC9608BD8F 15. ??:0: ?? @ 0x7FAC9608BE3F 16. ??:0: ?? @ 0x17C38028 >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2024-11-21T23:14:51.422601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875027978672181:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:51.422876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002be1/r3tmp/tmpH3Ygde/pdisk_1.dat 2024-11-21T23:14:52.083618Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:52.114576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:52.114668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:52.128435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9819, node 1 2024-11-21T23:14:52.298647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:52.298678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:52.298688Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:52.298802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14312 WaitRootIsUp 'Root'... 
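The failure reported above for KqpQueryService::TableSink_Oltp_Replace-UseSink reduces to one check: the test asserts prepareResult.IsSuccess() and, when that is false, the unittest harness raises the attached issue text (here "GRpc error: (4): Deadline Exceeded") together with the stack trace. The snippet below is a minimal, self-contained sketch of that assert-on-result pattern; TResult, IssuesToString, and ExpectSuccess are hypothetical stand-ins for illustration only, not the YDB SDK types or the library/cpp/testing/unittest macros the real test uses.

#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

// Hypothetical stand-in for an SDK result: a status flag plus the list of
// human-readable issues that the log prints after "issues: {".
struct TResult {
    bool Success = false;
    std::vector<std::string> Issues;

    bool IsSuccess() const { return Success; }

    std::string IssuesToString() const {
        std::string out;
        for (const auto& issue : Issues) {
            out += ": Error: " + issue + "\n";
        }
        return out;
    }
};

// What an assertion macro effectively does on failure: raise an error that
// carries the issue text, so the test log shows why the result was not OK.
void ExpectSuccess(const TResult& result, const std::string& where) {
    if (!result.IsSuccess()) {
        throw std::runtime_error("assertion failed at " + where +
                                 ": (result.IsSuccess())\n" + result.IssuesToString());
    }
}

int main() {
    // Simulate the failing case from the log: the prepare step timed out.
    TResult prepareResult;
    prepareResult.Issues.push_back("GRpc error: (4): Deadline Exceeded");

    try {
        ExpectSuccess(prepareResult, "kqp_qs_queries_ut.cpp:4329");
    } catch (const std::exception& e) {
        std::cerr << e.what();
        return 1;
    }
    return 0;
}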
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:52.800223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:52.879293Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:52.883595Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2024-11-21T23:14:52.883676Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Connect to grpc://localhost:11116 2024-11-21T23:14:52.887702Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2024-11-21T23:14:52.911881Z node 1 :GRPC_CLIENT DEBUG: [5160000402d0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:14:52.912220Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T23:14:52.913493Z node 1 :GRPC_CLIENT DEBUG: [5160000408d0] Connect to grpc://localhost:5315 2024-11-21T23:14:52.914586Z node 1 :GRPC_CLIENT DEBUG: [5160000408d0] Request GetUserAccountRequest { user_account_id: "user1" } 2024-11-21T23:14:52.931991Z node 1 :GRPC_CLIENT DEBUG: [5160000408d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2024-11-21T23:14:52.932410Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T23:14:56.832249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875047913557855:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:56.832308Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002be1/r3tmp/tmpwL7XSz/pdisk_1.dat 2024-11-21T23:14:56.941664Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20170, node 2 2024-11-21T23:14:57.007987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:57.008136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:57.064245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:57.121059Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:57.121092Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:57.121098Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:57.121272Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:57.418759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:57.449250Z node 2 :TICKET_PARSER ERROR: Ticket **** (8E120919): Token is not supported 2024-11-21T23:15:00.793659Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875064659376479:2120];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:00.818410Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002be1/r3tmp/tmpvrNluI/pdisk_1.dat 2024-11-21T23:15:00.897436Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:00.914938Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:00.915022Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:00.916024Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23692, node 3 2024-11-21T23:15:00.961852Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:00.961882Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:00.961892Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:00.962062Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62376 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:01.285135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:01.301656Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:15:01.304042Z node 3 :TICKET_PARSER ERROR: Ticket **** (8E120919): Unknown token 2024-11-21T23:15:04.699077Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439875082062719809:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:04.699286Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002be1/r3tmp/tmpSYScSf/pdisk_1.dat 2024-11-21T23:15:04.804058Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:04.826320Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:04.826422Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:04.827855Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63324, node 4 2024-11-21T23:15:04.874658Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:04.874697Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:04.874705Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:04.874887Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72 ... 
t { id: "user1" } } } 2024-11-21T23:15:05.156489Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.connect now has a valid subject "user1@as" 2024-11-21T23:15:05.156612Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T23:15:05.157053Z node 4 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:05.157221Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Request AuthorizeRequest { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } api_key: "ApiK****alid (AB5B5EA8)" } 2024-11-21T23:15:05.158824Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Response AuthorizeResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2024-11-21T23:15:05.158932Z node 4 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) permission something.read now has a valid subject "ApiKey-value-valid@as" 2024-11-21T23:15:05.159011Z node 4 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2024-11-21T23:15:05.159314Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T23:15:05.159453Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:05.160976Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Status 16 Access Denied 2024-11-21T23:15:05.161177Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:05.161213Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2024-11-21T23:15:05.161730Z node 4 :TICKET_PARSER TRACE: Ticket **** (E2D1584C) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:05.161881Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Request AuthorizeRequest { iam_token: "**** (E2D1584C)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:05.167011Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Status 16 Access Denied 2024-11-21T23:15:05.167139Z node 4 :TICKET_PARSER TRACE: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:05.167206Z node 4 :TICKET_PARSER DEBUG: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2024-11-21T23:15:05.167634Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:05.167760Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:05.169316Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Status 16 Access Denied 2024-11-21T23:15:05.169442Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:05.169474Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 
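The TICKET_PARSER / GRPC_CLIENT trace above follows a fixed shape: every AuthorizeRequest carries a masked token, a single permission, and a list of resource_path entries, and the parser either records a valid subject from the response (e.g. "user1@as") or marks the permission with a permanent "Access Denied" error when the call returns gRPC status 16. The sketch below models only that decision step with hypothetical types (TAuthorizeRequest, TAuthorizeResponse, TPermissionState); it is an illustration under those assumptions, not the actual YDB ticket parser or its gRPC client.

#include <iostream>
#include <optional>
#include <string>
#include <vector>

// Hypothetical shapes mirroring the AuthorizeRequest/AuthorizeResponse pairs
// in the trace above; this is not the real gRPC API or ticket parser code.
struct TResourcePath {
    std::string Id;
    std::string Type;
};

struct TAuthorizeRequest {
    std::string MaskedToken;                  // e.g. "**** (8E120919)"
    std::string Permission;                   // e.g. "something.read"
    std::vector<TResourcePath> ResourcePath;  // database + folder entries
};

struct TAuthorizeResponse {
    int GrpcStatus = 0;                       // 16 == Access Denied in the log
    std::optional<std::string> SubjectId;     // e.g. "user1"
};

// Per-permission outcome, matching the two branches seen in the trace:
// a valid subject ("user1@as") or a permanent, non-retryable error.
struct TPermissionState {
    bool Valid = false;
    std::string Subject;
    std::string Error;
};

TPermissionState HandleAuthorizeResponse(const TAuthorizeResponse& resp) {
    TPermissionState state;
    if (resp.GrpcStatus == 16 || !resp.SubjectId) {
        state.Error = "Access Denied";        // recorded as retryable:0
        return state;
    }
    state.Valid = true;
    state.Subject = *resp.SubjectId + "@as";  // suffix used in the log
    return state;
}

int main() {
    TAuthorizeRequest req{"**** (8E120919)", "something.read",
                          {{"bbbb4554", "ydb.database"},
                           {"aaaa1234", "resource-manager.folder"}}};

    // Denied case: the service answers with gRPC status 16.
    TPermissionState denied = HandleAuthorizeResponse({16, std::nullopt});
    std::cout << req.Permission << " -> " << denied.Error << "\n";

    // Allowed case: the response carries a subject and the token becomes valid.
    TPermissionState allowed = HandleAuthorizeResponse({0, std::string("user1")});
    std::cout << req.Permission << " -> " << allowed.Subject << "\n";
    return 0;
}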
2024-11-21T23:15:05.169870Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:05.170008Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2024-11-21T23:15:05.171517Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Status 16 Access Denied 2024-11-21T23:15:05.171611Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:05.171641Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2024-11-21T23:15:05.171950Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:05.172076Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:05.173380Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:05.173463Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T23:15:05.173544Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T23:15:05.173870Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:05.173993Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2024-11-21T23:15:05.175401Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:05.175528Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T23:15:05.175589Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T23:15:05.175875Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2024-11-21T23:15:05.175989Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2024-11-21T23:15:05.177341Z node 4 :GRPC_CLIENT DEBUG: [51600011bed0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:05.177421Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2024-11-21T23:15:05.177498Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T23:15:08.314912Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875100272939190:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:08.315031Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002be1/r3tmp/tmpx01PFm/pdisk_1.dat 2024-11-21T23:15:08.434257Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:08.451454Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:08.451563Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:08.457143Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15629, node 5 2024-11-21T23:15:08.496426Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:08.496474Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:08.496484Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:08.496622Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:08.743478Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:08.753085Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:08.753170Z node 5 :GRPC_CLIENT DEBUG: [5160000ac8d0] Connect to grpc://localhost:11694 2024-11-21T23:15:08.753948Z node 5 :GRPC_CLIENT DEBUG: [5160000ac8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:08.760588Z node 5 :GRPC_CLIENT DEBUG: [5160000ac8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:08.760733Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T23:15:08.760848Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T23:15:08.761217Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:08.761270Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T23:15:08.761382Z node 5 :GRPC_CLIENT DEBUG: [5160000ac8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:08.761788Z node 5 :GRPC_CLIENT DEBUG: [5160000ac8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:08.763486Z node 5 :GRPC_CLIENT DEBUG: [5160000ac8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:08.763593Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T23:15:08.763689Z node 5 :GRPC_CLIENT DEBUG: [5160000ac8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:08.763741Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2024-11-21T23:15:08.763820Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] Test command err: 2024-11-21T23:14:54.854344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875040326558770:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:54.854843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bdb/r3tmp/tmpYJgURL/pdisk_1.dat 2024-11-21T23:14:55.570469Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:55.575776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:55.575857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:55.580669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63675, node 1 2024-11-21T23:14:55.766935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:55.766972Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:55.766981Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:55.767104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:56.302079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:56.327725Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:56.333066Z node 1 :TICKET_PARSER DEBUG: Ticket 9F349FD7DFCE0005EB1447B1080DAD4A1F53630C () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2024-11-21T23:14:59.670295Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875063665002860:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:59.670441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bdb/r3tmp/tmpd6mELr/pdisk_1.dat 2024-11-21T23:14:59.820978Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:59.839776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:59.839867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:59.841623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27676, node 2 2024-11-21T23:14:59.918963Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:59.918989Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:59.918996Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:59.919076Z node 2 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:6127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:00.182143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:00.190608Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:15:00.193496Z node 2 :TICKET_PARSER DEBUG: Ticket D7A3843F9A63394F2F0CAD13073B0D0CCD738162 () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2024-11-21T23:15:00.194436Z node 2 :TICKET_PARSER ERROR: Ticket D7A3843F9A63394F2F0CAD13073B0D0CCD738162: Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers 2024-11-21T23:15:03.476328Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875079053185565:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:03.476403Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bdb/r3tmp/tmp1svcqt/pdisk_1.dat 2024-11-21T23:15:03.560034Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21893, node 3 2024-11-21T23:15:03.608666Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:03.608766Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:03.610187Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:03.636095Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:03.636119Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:03.636126Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:03.636246Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3225 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:03.856140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:03.863693Z node 3 :TICKET_PARSER DEBUG: Ticket 25EFF518BCD6EB79D6B479F2457E27714A843487 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2024-11-21T23:15:03.864200Z node 3 :TICKET_PARSER ERROR: Ticket 25EFF518BCD6EB79D6B479F2457E27714A843487: Cannot create token from certificate. Client certificate failed verification 2024-11-21T23:15:07.047356Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439875098678680176:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:07.047414Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bdb/r3tmp/tmpFvP340/pdisk_1.dat 2024-11-21T23:15:07.167634Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:07.192989Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:07.193091Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:07.194820Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10581, node 4 2024-11-21T23:15:07.239851Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:07.239878Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:07.239887Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:07.240035Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3914 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:07.473462Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:07.480539Z node 4 :TICKET_PARSER DEBUG: Ticket 0927BFEF2DF469EFECEAD2A6F8481B236005CC50 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2024-11-21T23:15:07.481193Z node 4 :TICKET_PARSER ERROR: Ticket 0927BFEF2DF469EFECEAD2A6F8481B236005CC50: Cannot create token from certificate. Client certificate failed verification 2024-11-21T23:15:10.540525Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875108912108968:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:10.540618Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bdb/r3tmp/tmpcBcoID/pdisk_1.dat 2024-11-21T23:15:10.656114Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:10.680477Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:10.680576Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:10.682328Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4883, node 5 2024-11-21T23:15:10.719126Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:10.719156Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:10.719165Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:10.719322Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13996 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:10.949515Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:10.962564Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2024-11-21T23:15:10.962625Z node 5 :GRPC_CLIENT DEBUG: [5160000609d0] Connect to grpc://localhost:25805 2024-11-21T23:15:10.965532Z node 5 :GRPC_CLIENT DEBUG: [5160000609d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2024-11-21T23:15:10.973198Z node 5 :GRPC_CLIENT DEBUG: [5160000609d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2024-11-21T23:15:10.973359Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2024-11-21T23:15:10.973472Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2024-11-21T23:15:10.973865Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2024-11-21T23:15:10.974104Z node 5 :GRPC_CLIENT DEBUG: [5160000609d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" 
resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2024-11-21T23:15:10.975938Z node 5 :GRPC_CLIENT DEBUG: [5160000609d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2024-11-21T23:15:10.976086Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "" 2024-11-21T23:15:10.976153Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2024-11-21T23:14:55.186606Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875046286173841:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:55.186919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bd5/r3tmp/tmpAxJlGl/pdisk_1.dat 2024-11-21T23:14:55.729806Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:55.753602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:55.753906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24070, node 1 2024-11-21T23:14:55.759739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:55.823435Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:55.823457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:55.823476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:55.823584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:14:56.170197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:56.197591Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:56.200657Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:14:56.200795Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Connect to grpc://localhost:3912 2024-11-21T23:14:56.204588Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:14:56.238764Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Status 14 Service Unavailable 2024-11-21T23:14:56.242565Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2024-11-21T23:14:56.242618Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T23:14:56.242645Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:14:56.243090Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:14:56.254502Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Status 14 Service Unavailable 2024-11-21T23:14:56.254990Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2024-11-21T23:14:56.255021Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T23:14:57.230539Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T23:14:57.230598Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:14:57.230946Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:14:57.236597Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Status 14 Service Unavailable 2024-11-21T23:14:57.236746Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2024-11-21T23:14:57.236786Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T23:14:59.231491Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2024-11-21T23:14:59.231555Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:14:59.231850Z node 1 
:GRPC_CLIENT DEBUG: [51600003ffd0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:14:59.239218Z node 1 :GRPC_CLIENT DEBUG: [51600003ffd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:14:59.239388Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2024-11-21T23:14:59.239554Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2024-11-21T23:15:00.186887Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875046286173841:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:00.187013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:08.876514Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875100804406515:2253];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bd5/r3tmp/tmpWRR0oW/pdisk_1.dat 2024-11-21T23:15:08.921333Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:08.968501Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:09.006129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:09.006230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30346, node 2 2024-11-21T23:15:09.008215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:09.062121Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:09.062143Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:09.062150Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:09.062254Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:09.306894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:09.314541Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:15:09.317181Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:09.317288Z node 2 :GRPC_CLIENT DEBUG: [5160001026d0] Connect to grpc://localhost:61711 2024-11-21T23:15:09.318278Z node 2 :GRPC_CLIENT DEBUG: [5160001026d0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:09.327762Z node 2 :GRPC_CLIENT DEBUG: [5160001026d0] Status 14 Service Unavailable 2024-11-21T23:15:09.328058Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2024-11-21T23:15:09.328090Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2024-11-21T23:15:09.328112Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:09.328408Z node 2 :GRPC_CLIENT DEBUG: [5160001026d0] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:09.330091Z node 2 :GRPC_CLIENT DEBUG: [5160001026d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:09.330246Z node 2 :T ... 15192, node 4 2024-11-21T23:15:15.731043Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:15.731149Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:15.732685Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:15.782546Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:15.782568Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:15.782586Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:15.782698Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4780 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:15:16.052894Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:16.060275Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:16.060372Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Connect to grpc://localhost:23960 2024-11-21T23:15:16.061314Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:16.090176Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:16.090370Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T23:15:16.090450Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T23:15:16.092137Z node 4 :GRPC_CLIENT DEBUG: [5160000477d0] Connect to grpc://localhost:2271 2024-11-21T23:15:16.093280Z node 4 :GRPC_CLIENT DEBUG: [5160000477d0] Request GetUserAccountRequest { user_account_id: "user1" } 2024-11-21T23:15:16.101551Z node 4 :GRPC_CLIENT DEBUG: [5160000477d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2024-11-21T23:15:16.102118Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T23:15:16.102665Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T23:15:16.103061Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:16.105248Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Status 16 Access Denied 2024-11-21T23:15:16.105406Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2024-11-21T23:15:16.105447Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2024-11-21T23:15:16.106282Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:16.106332Z node 4 :TICKET_PARSER TRACE: 
Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T23:15:16.106492Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:16.107004Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:16.108963Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:16.112320Z node 4 :GRPC_CLIENT DEBUG: [516000047dd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:16.112500Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T23:15:16.112542Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2024-11-21T23:15:16.112560Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2024-11-21T23:15:16.112683Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2024-11-21T23:15:19.085813Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875148268656985:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:19.085893Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bd5/r3tmp/tmpFA0Xxz/pdisk_1.dat 2024-11-21T23:15:19.159410Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11469, node 5 2024-11-21T23:15:19.209638Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:19.209728Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:19.211201Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:19.211221Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:19.211230Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:19.211385Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:19.211428Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61603 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:19.398459Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:19.403677Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:19.403727Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T23:15:19.403759Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Connect to grpc://localhost:29777 2024-11-21T23:15:19.404561Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:19.404838Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:19.409775Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Status 14 Service Unavailable 2024-11-21T23:15:19.409865Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2024-11-21T23:15:19.410051Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2024-11-21T23:15:19.410275Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2024-11-21T23:15:19.410323Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2024-11-21T23:15:19.410346Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2024-11-21T23:15:19.410386Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2024-11-21T23:15:19.410531Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2024-11-21T23:15:19.410978Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" 
type: "resource-manager.folder" } } 2024-11-21T23:15:19.413218Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Status 1 CANCELLED 2024-11-21T23:15:19.413259Z node 5 :GRPC_CLIENT DEBUG: [5160000aecd0] Status 1 CANCELLED 2024-11-21T23:15:19.413295Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2024-11-21T23:15:19.413340Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2024-11-21T23:15:19.413363Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: Trying to start YDB, gRPC: 24660, MsgBus: 20633 2024-11-21T23:14:18.329171Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874885192504200:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:18.329211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fa9/r3tmp/tmpYbZSFN/pdisk_1.dat 2024-11-21T23:14:18.982830Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:19.011998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:19.012092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:19.020125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24660, node 1 2024-11-21T23:14:19.254743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:19.254774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:19.254782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:19.254877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20633 TClient is connected to server localhost:20633 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:14:20.674987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:20.795156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:21.140096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:21.412229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:21.519286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:23.331812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439874885192504200:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:23.331903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:25.791329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874915257276763:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:25.791501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:26.271239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:26.361511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:26.462534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:14:26.594431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:14:26.665126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:14:26.818425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:14:26.938781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874919552244591:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:26.938868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:26.940678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439874919552244596:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:26.945397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:14:26.969776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439874919552244598:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:14:29.002142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3034, MsgBus: 28432 2024-11-21T23:14:31.035068Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874943187702376:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fa9/r3tmp/tmppHCF1P/pdisk_1.dat 2024-11-21T23:14:31.273105Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:14:31.589522Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:31.644189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:31.644301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:31.647744Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3034, node 2 2024-11-21T23:14:31.811325Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:31.811347Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:31.811355Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:31.811496Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28432 TClient is connected to server localhost:28432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:32.645527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:32.653551Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:14:32.665713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:32.890045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:33.651541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:34.429386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:36.014555Z node 2 :METADATA_PROVIDER ERROR: ... ode 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874981842409630:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:40.815567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:41.003705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:41.064996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:41.237743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:14:41.341709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:14:41.430523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:14:41.533038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:14:41.705741Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874986137377436:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:41.705844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:41.706168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439874986137377441:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:41.710262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:14:41.729681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439874986137377443:2443], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:14:43.926855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2024-11-21T23:14:44.131379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9679, MsgBus: 24403 2024-11-21T23:14:45.208678Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875002488757211:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fa9/r3tmp/tmpMk7apU/pdisk_1.dat 2024-11-21T23:14:45.298418Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:14:45.332390Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:45.345202Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:45.350585Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:45.354918Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9679, node 3 2024-11-21T23:14:45.415569Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:45.415596Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:45.415605Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:45.415727Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24403 TClient is connected to server localhost:24403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:46.218374Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:46.273654Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:14:46.394375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:46.669497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:46.795024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:50.204524Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875002488757211:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:50.204610Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:51.240462Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875028258562558:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:51.240578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:51.320034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:51.384423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:51.466842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:14:51.532673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:14:51.601525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:14:51.700455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:14:51.824264Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875028258563076:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:51.824376Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:51.824712Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875028258563081:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:51.832694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:14:51.876285Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875028258563083:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:14:53.209864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 |85.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant |85.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> TInterconnectTest::OldFormat >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped >> TInterconnectTest::TestBlobEvent220BytesPreSerialized |85.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent >> TestProtocols::TestResolveProtocol >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> BsControllerConfig::OverlayMap >> JsonProtoConversion::NlohmannJsonToProtoArray >> BsControllerConfig::PDiskCreate >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> TActorTracker::Basic [GOOD] >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> BsControllerConfig::ManyPDisksRestarts >> BsControllerConfig::OverlayMapCrossReferences >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> BsControllerConfig::ReassignGroupDisk |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::JsonToProtoMap [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] >> BsControllerConfig::OverlayMap [GOOD] >> JsonProtoConversion::JsonToProtoArray [GOOD] >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ShouldBePings |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized 
|85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> AggregateStatistics::ShouldBePings [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |85.1%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |85.1%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2024-11-21T23:15:25.505377Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.505632Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T23:15:25.506211Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T23:15:25.506241Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.506286Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [1:7:2054], tablet id = 1, status = ERROR 2024-11-21T23:15:25.506303Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2024-11-21T23:15:25.506379Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 2 2024-11-21T23:15:25.506440Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2024-11-21T23:15:25.506517Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 4, status = ERROR 2024-11-21T23:15:25.506574Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 
2024-11-21T23:15:25.506654Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 5 2024-11-21T23:15:25.506685Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 2024-11-21T23:15:25.506757Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2024-11-21T23:15:25.506814Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 7, status = ERROR 2024-11-21T23:15:25.506833Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 2024-11-21T23:15:25.506862Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T23:15:25.506881Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.506905Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 8 2024-11-21T23:15:25.506921Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 2024-11-21T23:15:25.506938Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2024-11-21T23:15:25.521382Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.522413Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 2, status = OK 2024-11-21T23:15:25.522869Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.522975Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 3, status = OK 2024-11-21T23:15:25.523029Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.523070Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 4, status = OK 2024-11-21T23:15:25.523117Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.523165Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 5, status = OK 2024-11-21T23:15:25.523197Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.523237Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T23:15:25.523527Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [1:7:2054], tablet id = 1, status = OK 2024-11-21T23:15:25.523599Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:7:2054], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.523674Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T23:15:25.523699Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.523731Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2024-11-21T23:15:25.523912Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 6, status = OK 
2024-11-21T23:15:25.523966Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.524004Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T23:15:25.524074Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 5, status = ERROR 2024-11-21T23:15:25.524096Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.524129Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 7, status = OK 2024-11-21T23:15:25.524166Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.524223Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:7:2054], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T23:15:25.524246Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.524264Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2024-11-21T23:15:25.524308Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 7, status = ERROR 2024-11-21T23:15:25.524325Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.534675Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2024-11-21T23:15:25.534770Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2024-11-21T23:15:25.534889Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2024-11-21T23:15:25.534918Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2024-11-21T23:15:25.534935Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2024-11-21T23:15:25.534975Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2024-11-21T23:15:25.535009Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2024-11-21T23:15:25.535043Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2024-11-21T23:15:25.535058Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 
2024-11-21T23:15:25.535078Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T23:15:25.535157Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.535200Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2024-11-21T23:15:25.535303Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2024-11-21T23:15:25.535343Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.535380Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T23:15:25.535395Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.535416Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T23:15:25.535434Z node 1 :STATISTICS DEBUG: Skip EvClientConnected >> TTopicApiDescribes::DescribeConsumer |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2024-11-21T23:15:25.657323Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.658376Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T23:15:25.658707Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.658808Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T23:15:25.658925Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T23:15:25.658947Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.659119Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.659393Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.659570Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2024-11-21T23:15:25.659638Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [3:41:2056], tablet id = 3, status = OK 2024-11-21T23:15:25.659689Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:41:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.659745Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T23:15:25.659774Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T23:15:25.659831Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [4:43:2056], tablet id = 4, status = OK 2024-11-21T23:15:25.659900Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:43:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.659958Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2024-11-21T23:15:25.659977Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T23:15:25.660005Z node 3 
:STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T23:15:25.660018Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.660075Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T23:15:25.660123Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.660187Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T23:15:25.660262Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T23:15:25.670599Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.670696Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T23:15:25.670814Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.670836Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T23:15:25.681414Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T23:15:25.681504Z node 1 :STATISTICS INFO: Node 2 is unavailable 2024-11-21T23:15:25.681548Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T23:15:25.681662Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.681696Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T23:15:25.681729Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.681750Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T23:15:25.681874Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.681907Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2024-11-21T23:15:25.602414Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.603663Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T23:15:25.604057Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.604167Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T23:15:25.604320Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T23:15:25.604351Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.604585Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.604927Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.605103Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2024-11-21T23:15:25.605173Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [3:41:2056], tablet id = 3, status = OK 2024-11-21T23:15:25.605225Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:41:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.605272Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 
2024-11-21T23:15:25.605300Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T23:15:25.605365Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [4:43:2056], tablet id = 4, status = OK 2024-11-21T23:15:25.605415Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:43:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.605459Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2024-11-21T23:15:25.605474Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T23:15:25.605496Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:41:2056], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T23:15:25.605508Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.605555Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:43:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T23:15:25.605565Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.605600Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T23:15:25.605654Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T23:15:25.616040Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.616129Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T23:15:25.616244Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.616271Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T23:15:25.626955Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T23:15:25.627063Z node 1 :STATISTICS INFO: Node 2 is unavailable 2024-11-21T23:15:25.627113Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T23:15:25.627277Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.627321Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2024-11-21T23:15:25.627356Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.627383Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2024-11-21T23:15:25.627530Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2024-11-21T23:15:25.627563Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2024-11-21T23:15:25.731591Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.732025Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.838717Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2024-11-21T23:15:25.838921Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T23:15:25.838997Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T23:15:25.839883Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T23:15:25.839929Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.839988Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:17:2054], server id = [0:0:0], tablet id = 2, 
status = ERROR 2024-11-21T23:15:25.840008Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.840087Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2024-11-21T23:15:25.840131Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2024-11-21T23:15:25.891807Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.892884Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [1:34:2057], tablet id = 1, status = OK 2024-11-21T23:15:25.893181Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:34:2057], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.893350Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:35:2058], server id = [1:35:2058], tablet id = 2, status = OK 2024-11-21T23:15:25.893405Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:35:2058], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.893533Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2024-11-21T23:15:25.893714Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:36:2059], server id = [1:36:2059], tablet id = 3, status = OK 2024-11-21T23:15:25.893757Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:36:2059], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.893951Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.894079Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2024-11-21T23:15:25.894239Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2024-11-21T23:15:25.894368Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:34:2057], server id = [0:0:0], tablet id = 1, status = ERROR 2024-11-21T23:15:25.894409Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.894443Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2024-11-21T23:15:25.894534Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:40:2056], server id = [2:40:2056], tablet id = 4, status = OK 2024-11-21T23:15:25.894589Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:40:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.894745Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 
2024-11-21T23:15:25.894812Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:43:2056], server id = [3:43:2056], tablet id = 5, status = OK 2024-11-21T23:15:25.894855Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:43:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.894885Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2024-11-21T23:15:25.895028Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:35:2058], server id = [0:0:0], tablet id = 2, status = ERROR 2024-11-21T23:15:25.895050Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.895082Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2024-11-21T23:15:25.895120Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T23:15:25.895238Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:36:2059], server id = [0:0:0], tablet id = 3, status = ERROR 2024-11-21T23:15:25.895255Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.895292Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:40:2056], server id = [0:0:0], tablet id = 4, status = ERROR 2024-11-21T23:15:25.895326Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.895369Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:45:2056], server id = [4:45:2056], tablet id = 6, status = OK 2024-11-21T23:15:25.895421Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:45:2056], path = { OwnerId: 3 LocalId: 3 } 2024-11-21T23:15:25.895446Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:43:2056], server id = [0:0:0], tablet id = 5, status = ERROR 2024-11-21T23:15:25.895472Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.895535Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2024-11-21T23:15:25.895567Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2024-11-21T23:15:25.895660Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:45:2056], server id = [0:0:0], tablet id = 6, status = ERROR 2024-11-21T23:15:25.895672Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2024-11-21T23:15:25.895746Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2024-11-21T23:15:25.895925Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2024-11-21T23:15:25.895977Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2024-11-21T23:15:25.896106Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2024-11-21T23:15:25.896143Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] |85.2%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.2%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2024-11-21T23:15:25.330528Z node 4 :INTERCONNECT WARN: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T23:15:25.827324Z node 5 :INTERCONNECT WARN: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T23:15:26.362200Z node 8 :INTERCONNECT WARN: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2024-11-21T23:15:26.364931Z node 7 :INTERCONNECT WARN: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default |85.2%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.2%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TTopicApiDescribes::DescribeTopic >> TTopicApiDescribes::GetPartitionDescribe >> BsControllerConfig::ExtendByCreatingSeparateBox |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest >> BsControllerConfig::MoveGroups >> BsControllerConfig::Basic |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> BsControllerConfig::SelectAllGroups >> TIcNodeCache::GetNodesInfoTest >> TestProtocols::TestHTTPRequest [GOOD] >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> Balancing::Balancing_OneTopic_TopicApi >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec >> DataShardReadIterator::ShouldHandleReadAck |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] |85.2%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |85.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> 
BsControllerConfig::OverlayMapCrossReferences [GOOD] >> BsControllerConfig::AddDriveSerial >> BsControllerConfig::PDiskCreate [GOOD] >> TSequence::CreateSequenceParallel >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> DataShardReadIterator::ShouldReverseReadMultipleKeys |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |85.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne >> DataShardReadIterator::ShouldReceiveErrorAfterSplit >> BsControllerConfig::ReassignGroupDisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T23:15:25.297674Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:25.303193Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:25.305388Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:25.305885Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:25.306708Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:25.306748Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:25.307001Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:25.320470Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:25.320625Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:25.320817Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:25.320930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:25.321019Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:25.321092Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:232:2066] recipient: [1:20:2067] 2024-11-21T23:15:25.333314Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 
2024-11-21T23:15:25.333471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:25.344339Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:25.344479Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:25.344589Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:25.344698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:25.344812Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:25.344860Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:25.344907Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:25.344965Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:25.355852Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:25.356024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:25.357402Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:25.357471Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:25.357616Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:25.371861Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2024-11-21T23:15:25.372595Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk2 
2024-11-21T23:15:25.372655Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk3 2024-11-21T23:15:25.372685Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2024-11-21T23:15:25.372708Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1001 Path# /dev/disk1 2024-11-21T23:15:25.372731Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk3 2024-11-21T23:15:25.372754Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2024-11-21T23:15:25.372779Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk2 2024-11-21T23:15:25.372826Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk3 2024-11-21T23:15:25.372861Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2024-11-21T23:15:25.372886Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1002 Path# /dev/disk1 2024-11-21T23:15:25.372923Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk3 2024-11-21T23:15:25.372948Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2024-11-21T23:15:25.372977Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2024-11-21T23:15:25.373010Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1001 Path# /dev/disk3 2024-11-21T23:15:25.373038Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1002 Path# /dev/disk1 2024-11-21T23:15:25.373073Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2024-11-21T23:15:25.373099Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk3 2024-11-21T23:15:25.373122Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1001 Path# /dev/disk1 2024-11-21T23:15:25.373161Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2024-11-21T23:15:25.373190Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1002 Path# /dev/disk1 2024-11-21T23:15:25.373214Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1002 Path# /dev/disk2 2024-11-21T23:15:25.373245Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk3 2024-11-21T23:15:25.373272Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1002 Path# /dev/disk1 2024-11-21T23:15:25.373315Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2024-11-21T23:15:25.373343Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1000 Path# /dev/disk2 2024-11-21T23:15:25.373367Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} 
Create new pdisk PDiskId# 2:1001 Path# /dev/disk1 2024-11-21T23:15:25.373408Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1002 Path# /dev/disk1 2024-11-21T23:15:25.373433Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1002 Path# /dev/disk2 2024-11-21T23:15:25.373456Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1001 Path# /dev/disk3 2024-11-21T23:15:25.373482Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1002 Path# /dev/disk1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T23:15:27.395617Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:27.396426Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:27.397739Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:27.398140Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:27.398730Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:27.398770Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:27.398938Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:27.407911Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:27.408031Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:27.408148Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:27.408259Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:27.408374Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:27.408457Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:232:2066] recipient: [11:20:2067] 2024-11-21T23:15:27.420257Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:27.420404Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:27.431121Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:27.431251Z node 11 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:27.431324Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:27.431393Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:27.431508Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:27.431646Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:27.431692Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:27.431760Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:27.442610Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:27.442814Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:27.444252Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:27.444309Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:27.444416Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:27.444965Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2024-11-21T23:15:27.445400Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk3 2024-11-21T23:15:27.445490Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1001 Path# /dev/disk1 2024-11-21T23:15:27.445532Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk2 2024-11-21T23:15:27.445559Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 
Path# /dev/disk3 2024-11-21T23:15:27.445581Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk2 2024-11-21T23:15:27.445602Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1001 Path# /dev/disk1 2024-11-21T23:15:27.445621Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2024-11-21T23:15:27.445645Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk3 2024-11-21T23:15:27.445666Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1001 Path# /dev/disk1 2024-11-21T23:15:27.445698Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1001 Path# /dev/disk1 2024-11-21T23:15:27.445724Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk2 2024-11-21T23:15:27.445747Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1001 Path# /dev/disk3 2024-11-21T23:15:27.445768Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1002 Path# /dev/disk2 2024-11-21T23:15:27.445789Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk3 2024-11-21T23:15:27.445809Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2024-11-21T23:15:27.445830Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2024-11-21T23:15:27.445870Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1002 Path# /dev/disk2 2024-11-21T23:15:27.445907Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk3 2024-11-21T23:15:27.445957Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1001 Path# /dev/disk3 2024-11-21T23:15:27.445989Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1001 Path# /dev/disk1 2024-11-21T23:15:27.446023Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1002 Path# /dev/disk1 2024-11-21T23:15:27.446054Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1002 Path# /dev/disk2 2024-11-21T23:15:27.446075Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2024-11-21T23:15:27.446096Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1002 Path# /dev/disk2 2024-11-21T23:15:27.446129Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1001 Path# /dev/disk3 2024-11-21T23:15:27.446156Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1002 Path# /dev/disk2 2024-11-21T23:15:27.446178Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1001 Path# /dev/disk3 2024-11-21T23:15:27.446201Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1002 Path# /dev/disk1 2024-11-21T23:15:27.446222Z node 
11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1002 Path# /dev/disk2 2024-11-21T23:15:27.446244Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1002 Path# /dev/disk1 |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant >> BsControllerConfig::SelectAllGroups [GOOD] >> DataShardReadIterator::ShouldReadRangeCellVec >> DataShardReadIteratorSysTables::ShouldRead >> DataShardReadIteratorBatchMode::RangeFull >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 23578, MsgBus: 10439 2024-11-21T23:14:50.790227Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875023678543365:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:50.790411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00109a/r3tmp/tmp12RGZB/pdisk_1.dat 2024-11-21T23:14:51.418976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:51.419092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:51.425483Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:51.429905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23578, node 1 2024-11-21T23:14:51.620880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:14:51.620916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:14:51.620927Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:14:51.621018Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10439 TClient is connected to server localhost:10439 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:14:52.596201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:52.632422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:52.906086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:53.254187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:53.382192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:14:55.786548Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875023678543365:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:55.786646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:14:55.821034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875045153381419:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:55.821164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:56.059649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:14:56.103665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:14:56.183502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:14:56.264559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:14:56.348984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:14:56.440490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:14:56.506313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875049448349230:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:56.506500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:56.507219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875049448349235:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:14:56.523047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:14:56.557330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875049448349237:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:14:57.738429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2024-11-21T23:14:58.344123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:14:58.746082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2024-11-21T23:14:59.316100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2024-11-21T23:14:59.844609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:15:00.338934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2024-11-21T23:15:00.913765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T23:15:00.945233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T23:15:03.015037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710714:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30538, MsgBus: 11221 2024-11-21T23:15:03.727173Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875078028313204:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:03.727224Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00109a/r3tmp/tmpj90G1T/pdisk_1.dat 2024-11-21T23:15:03.833237Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:03.849217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:03.849303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:03.850812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30538, node 2 2024-11-21T23:15:03.898678Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:03.898703Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:03.898711Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T23:15:03.898836Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11221 TClient is connected to server localhost:11221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } C ... 81474976715671:1, at schemeshard: 72057594046644480 2024-11-21T23:15:08.634885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:15:08.727644Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875078028313204:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:08.727705Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:09.063556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2024-11-21T23:15:09.598828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T23:15:10.202666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T23:15:10.726271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T23:15:11.300081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T23:15:11.337576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T23:15:14.578172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715735:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2596, MsgBus: 27024 2024-11-21T23:15:15.720201Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875130410637500:2106];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:15.742642Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/dl5p/00109a/r3tmp/tmp7YvfRK/pdisk_1.dat 2024-11-21T23:15:15.842713Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:15.869689Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:15.869784Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:15.873823Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2596, node 3 2024-11-21T23:15:15.926987Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:15.927012Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:15.927021Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:15.927132Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27024 TClient is connected to server localhost:27024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:16.567275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:16.588791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:16.639266Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:16.807476Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:16.883501Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:19.015870Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875147590508322:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:19.015946Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:19.050652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:19.079854Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:19.106919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:19.135888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:19.160224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:19.188916Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:19.252528Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875147590508816:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:19.252563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875147590508821:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:19.252607Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:19.255746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:19.264614Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875147590508823:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:15:20.259696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2024-11-21T23:15:20.706085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:15:20.722866Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875130410637500:2106];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:20.722925Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:21.165533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2024-11-21T23:15:21.625531Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T23:15:22.135251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T23:15:22.628758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T23:15:23.077443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T23:15:23.121573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T23:15:27.102042Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715734:0, at schemeshard: 72057594046644480 >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestCreateSubSubDomain |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |85.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_kqp/ydb-core-sys_view-ut_kqp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2024-11-21T23:15:27.863817Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:27.868093Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:27.869386Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2024-11-21T23:15:27.869694Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:27.870227Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:27.870254Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:27.870460Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:27.878915Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:27.879023Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:27.879182Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:27.879295Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:27.879399Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:27.879502Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:28.020484Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.105298s 2024-11-21T23:15:28.020620Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.105456s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:266:2068] recipient: [1:243:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:266:2068] recipient: [1:243:2077] Leader for TabletID 72057594037932033 is [1:268:2079] sender: [1:269:2068] recipient: [1:243:2077] 2024-11-21T23:15:25.648791Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:25.654900Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:25.657542Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:25.657986Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:25.658899Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:25.658945Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:25.659192Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:25.670357Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:25.670574Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:25.670708Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 
2024-11-21T23:15:25.670818Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:25.670918Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:25.670989Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:268:2079] sender: [1:294:2068] recipient: [1:22:2069] 2024-11-21T23:15:25.683008Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:25.683162Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:25.694141Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:25.694319Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:25.694453Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:25.694561Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:25.694708Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:25.694805Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:25.694855Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:25.694934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:25.705750Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:25.705898Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:25.707086Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:25.707142Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:25.707257Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:25.721008Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key 
{ Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T23:15:25.721616Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk 2024-11-21T23:15:25.721665Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2024-11-21T23:15:25.721750Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2024-11-21T23:15:25.721788Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2024-11-21T23:15:25.721825Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2024-11-21T23:15:25.721850Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2024-11-21T23:15:25.721870Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2024-11-21T23:15:25.721891Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2024-11-21T23:15:25.721915Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2024-11-21T23:15:25.721944Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2024-11-21T23:15:25.721971Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2024-11-21T23:15:25.721996Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 1:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2024-11-21T23:15:25.742839Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:266:2068] recipient: [13:243:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:266:2068] recipient: [13:243:2077] Leader for TabletID 72057594037932033 is [13:268:2079] sender: [13:269:2068] recipient: [13:243:2077] 2024-11-21T23:15:27.900804Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:27.901670Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event 
Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:27.903169Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:27.903575Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:27.904229Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:27.904263Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:27.904447Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:27.912548Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:27.912636Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:27.912706Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:27.912771Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:27.912852Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:27.912920Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:268:2079] sender: [13:294:2068] recipient: [13:22:2069] 2024-11-21T23:15:27.924821Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:27.924973Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:27.935735Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:27.935871Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:27.935951Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:27.936020Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:27.936100Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:27.936146Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:27.936215Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:27.936274Z 
node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:27.946884Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:27.946996Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:27.948030Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:27.948089Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:27.948189Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:27.948705Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T23:15:27.949108Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 24:1000 Path# /dev/disk 2024-11-21T23:15:27.949142Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2024-11-21T23:15:27.949170Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2024-11-21T23:15:27.949185Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2024-11-21T23:15:27.949217Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2024-11-21T23:15:27.949240Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2024-11-21T23:15:27.949255Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2024-11-21T23:15:27.949269Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2024-11-21T23:15:27.949282Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2024-11-21T23:15:27.949295Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2024-11-21T23:15:27.949310Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 
16:1000 Path# /dev/disk 2024-11-21T23:15:27.949323Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2024-11-21T23:15:27.970224Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" >> DataShardReadIterator::ShouldReadKeyCellVec >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool >> TExternalTableTest::DropExternalTable >> TExternalTableTest::CreateExternalTable >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> TExternalTableTest::ParallelCreateSameExternalTable >> TExternalTableTest::SchemeErrors >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive >> TExternalTableTest::ReadOnlyMode >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalTableTest::SchemeErrors [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:33.101291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:33.101392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:33.101440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:33.101497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:33.101556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:33.101589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:33.101651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:33.102054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:33.182449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:33.182510Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:33.205792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:33.211072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:33.211266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:33.218055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:33.219230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:33.219962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.220280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:33.235893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.237247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.237309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.237561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:33.237615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.237659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:33.237741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.248321Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:33.384732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.384987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.385267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:33.385500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:33.385562Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.391557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.391756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:33.391981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.392051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:33.392098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:33.392125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:33.395378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.395432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:33.395479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:33.396928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.396963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.396995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.397033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.399704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:33.403912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:33.404127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:33.405148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.405302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.405350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 
72057594046678944 2024-11-21T23:15:33.405672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:33.405722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.405927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.406032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:33.408879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.408929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.409069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.409111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:33.409390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.409433Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:33.409508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:33.409534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.409567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:33.409596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.409637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:33.409662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:33.409727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:33.409758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:33.409790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:33.411474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.411587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.411631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:33.411681Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:33.411717Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.411821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 4981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:15:33.515040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 125, subscribers: 0 2024-11-21T23:15:33.516409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2024-11-21T23:15:33.518022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2024-11-21T23:15:33.518119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2024-11-21T23:15:33.518685Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:33.518939Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 223us result status StatusSuccess 2024-11-21T23:15:33.519230Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.519800Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:33.519921Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 124us result status StatusSuccess 2024-11-21T23:15:33.520165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2024-11-21T23:15:33.520444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2024-11-21T23:15:33.520484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2024-11-21T23:15:33.520566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2024-11-21T23:15:33.520585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2024-11-21T23:15:33.520630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2024-11-21T23:15:33.520648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2024-11-21T23:15:33.521265Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2024-11-21T23:15:33.521364Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2024-11-21T23:15:33.521412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2024-11-21T23:15:33.521466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:338:2330] 2024-11-21T23:15:33.521575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2024-11-21T23:15:33.521595Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:338:2330] 2024-11-21T23:15:33.521708Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2024-11-21T23:15:33.521784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2024-11-21T23:15:33.521814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:338:2330] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2024-11-21T23:15:33.522291Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:33.522518Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 236us result status StatusSuccess 2024-11-21T23:15:33.522834Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2024-11-21T23:15:33.525807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.526124Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" 
DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2024-11-21T23:15:33.526212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2024-11-21T23:15:33.526349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2024-11-21T23:15:33.528420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2024-11-21T23:15:33.528562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:33.208034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:33.208127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:33.208168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:33.208207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:33.208260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:33.208285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:33.208363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:33.208649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:33.277493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:33.277554Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2024-11-21T23:15:33.288875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:33.292661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:33.292869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:33.300605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:33.301262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:33.301840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.302108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:33.306315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.307604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.307663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.307921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:33.307983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.308025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:33.308107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.314334Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:33.432025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.432246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.432487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:33.432679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:33.432733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.435875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.436016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:33.436187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.436240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:33.436266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:33.436288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:33.439706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.439755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:33.439782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:33.443157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.443197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.443235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.443275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.446364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:33.449317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:33.449522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:33.450564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.450688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.450732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.451039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:33.451126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.451299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.451370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:33.456105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.456174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.456362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.456402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:33.456736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.456782Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:33.456882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:33.456922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.457006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:33.457066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.457105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:33.457134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:33.457219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:33.457258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:33.457294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:33.459222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.459338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.459380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:33.459412Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:33.459467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.459612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
mentPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:33.518658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:33.519200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:33.519489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:15:33.520919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.520977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.521117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:15:33.521197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:15:33.521277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.521305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T23:15:33.521362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T23:15:33.521385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T23:15:33.521441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.521482Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:15:33.521628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:15:33.521672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:15:33.521730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T23:15:33.521768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:15:33.521800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:15:33.521829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:15:33.521908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:15:33.521952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T23:15:33.521985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2024-11-21T23:15:33.522012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 
2024-11-21T23:15:33.523273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:33.523365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:33.523395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:15:33.523437Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2024-11-21T23:15:33.523511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:15:33.525178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:33.525278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:33.525320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:15:33.525351Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T23:15:33.525380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:33.525453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T23:15:33.527512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:33.528634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:15:33.528836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:15:33.528877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:15:33.529181Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:15:33.529236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:15:33.529276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:325:2317] TestWaitNotification: OK eventTxId 102 2024-11-21T23:15:33.529746Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2024-11-21T23:15:33.529917Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 184us result status StatusSuccess 2024-11-21T23:15:33.530199Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T23:15:33.532632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.532868Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2024-11-21T23:15:33.532931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists:1 2024-11-21T23:15:33.533039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2024-11-21T23:15:33.535515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 
SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2024-11-21T23:15:33.535699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:15:33.535992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:15:33.536032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:15:33.536468Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:15:33.536567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:15:33.536603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:333:2325] TestWaitNotification: OK eventTxId 103 >> TExternalTableTest::Decimal [GOOD] >> TExternalTableTest::ReadOnlyMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:33.214217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:33.214456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:33.214508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:33.214565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:33.214630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:33.214669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:33.214728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:33.215052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:33.294483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:33.294538Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:33.308243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:33.312218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:33.312397Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:33.321913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:33.322620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:33.323299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.323714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:33.328162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.329479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.329564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.329818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:33.329876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.329915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:33.329998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.337279Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:33.498253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.499936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.500218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:33.500510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:33.500589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.504066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.504233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:33.504473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.504535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T23:15:33.504589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:33.504626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:33.507003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.507080Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:33.507119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:33.509146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.509200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.509294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.509355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.513414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:33.516555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:33.516763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:33.517855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.518011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.518060Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.518413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:33.518483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.518663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.518781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:33.520877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.520930Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.521109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.521179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:33.521583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.521640Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:33.521737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:33.521762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.521801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:33.521839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.521906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:33.521935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:33.522004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:33.522045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:33.522087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:33.524178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.524294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.524344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:33.524404Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:33.524463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.524567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2024-11-21T23:15:33.660497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.661268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2024-11-21T23:15:33.661379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2024-11-21T23:15:33.661643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2024-11-21T23:15:33.664402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.664622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2024-11-21T23:15:33.667474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.667884Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2024-11-21T23:15:33.668005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2024-11-21T23:15:33.668142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot 
have an empty name, at schemeshard: 72057594046678944 2024-11-21T23:15:33.670859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.671100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2024-11-21T23:15:33.673999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.674360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2024-11-21T23:15:33.674466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2024-11-21T23:15:33.674601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2024-11-21T23:15:33.676931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.677117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2024-11-21T23:15:33.679858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.680192Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2024-11-21T23:15:33.680329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2024-11-21T23:15:33.680468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2024-11-21T23:15:33.682835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.682982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2024-11-21T23:15:33.685890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.686248Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2024-11-21T23:15:33.686341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2024-11-21T23:15:33.687138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2024-11-21T23:15:33.689635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.689820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2024-11-21T23:15:33.692285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.692627Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" 
} } 2024-11-21T23:15:33.692708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2024-11-21T23:15:33.692846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2024-11-21T23:15:33.694897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.695031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:32.791892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:32.791983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:32.792038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:32.792083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:32.792143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:32.792181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:32.792239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:32.792578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:32.886008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:32.886073Z node 1 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T23:15:32.898906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:32.903295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:32.903529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:32.910711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:32.911380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:32.912138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:32.912496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:32.935243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:32.936700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:32.936770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:32.937021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:32.937090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:32.937130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:32.937228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:32.956336Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:33.113345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.113604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.113825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:33.114064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:33.114128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.116695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.116862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:33.117059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.117117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:33.117151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:33.117181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:33.119185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.119247Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:33.119279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:33.120864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.120929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.120971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.121023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.132510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:33.135277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:33.135559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:33.136587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.136752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.136814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.137201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:33.137265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.137460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.137566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:33.151649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.151715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.151905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.151946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:33.152289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.152344Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:33.152443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:33.152487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.152543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:33.152627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.152675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:33.152712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:33.152804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:33.152849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:33.152887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:33.155403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.155593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.155642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:33.155695Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:33.155742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.155854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
95635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T23:15:34.095661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T23:15:34.095682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T23:15:34.096548Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:34.096623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:34.096656Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:15:34.096696Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T23:15:34.096735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:15:34.097403Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:34.097455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:34.097471Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:15:34.097504Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T23:15:34.097525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:34.099001Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:34.099059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:34.099079Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:15:34.099107Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T23:15:34.099151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:15:34.099221Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T23:15:34.101185Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:34.102632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:34.103709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:15:34.103958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:15:34.104004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:15:34.104453Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:15:34.104566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:15:34.104618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:331:2323] TestWaitNotification: OK eventTxId 102 2024-11-21T23:15:34.105070Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:34.105269Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 242us result status StatusSuccess 2024-11-21T23:15:34.105747Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2024-11-21T23:15:34.108846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: 
"/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:34.109213Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2024-11-21T23:15:34.109309Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2024-11-21T23:15:34.109437Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2024-11-21T23:15:34.115338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2024-11-21T23:15:34.115497Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:15:34.115871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:15:34.115906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:15:34.116408Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:15:34.116491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:15:34.116529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:339:2331] TestWaitNotification: OK eventTxId 103 2024-11-21T23:15:34.117161Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:34.117336Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 201us result status StatusSuccess 2024-11-21T23:15:34.117548Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { 
Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:33.530421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:33.530538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:33.530596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:33.530655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:33.530716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:33.530752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:33.530830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:33.531196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:33.624092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:33.624154Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:33.636997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:33.641271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:33.641487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2024-11-21T23:15:33.648594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:33.649247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:33.649929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.650300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:33.654791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.656187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.656254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.656513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:33.656567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.656611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:33.656707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.664398Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:33.800706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.800983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.801207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:33.801437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:33.801503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.804496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.804695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:33.804937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.805012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:33.805064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:33.805101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:33.807522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.807603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:33.807646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:33.809850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.809913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.809962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.810027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.813206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:33.814904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:33.815064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:33.815790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.815893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.815931Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.816183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:33.816229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.816384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.816463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:33.818491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.818544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.818762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.818811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:33.819168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.819219Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:33.819319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:33.819358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.819409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:33.819472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.819531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:33.819568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:33.819639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:33.819689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:33.819733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:33.821648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.821763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.821799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:33.821864Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:33.821907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.822005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
4 2024-11-21T23:15:34.200209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 0 2024-11-21T23:15:34.200295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:34.202538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T23:15:34.202683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T23:15:34.207857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2024-11-21T23:15:34.208081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2024-11-21T23:15:34.208360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:34.208424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:34.208625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T23:15:34.208727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:34.208771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:479:2440], at schemeshard: 72057594046678944, txId: 129, path id: 1 2024-11-21T23:15:34.208843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:479:2440], at schemeshard: 72057594046678944, txId: 129, path id: 5 2024-11-21T23:15:34.208939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2024-11-21T23:15:34.208995Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#129:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:34.209055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2024-11-21T23:15:34.209183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:34.210358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:15:34.210491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:15:34.210559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2024-11-21T23:15:34.210656Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2024-11-21T23:15:34.210699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T23:15:34.212476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:15:34.212588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:15:34.212618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2024-11-21T23:15:34.212647Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2024-11-21T23:15:34.212706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T23:15:34.212803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2024-11-21T23:15:34.214180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2024-11-21T23:15:34.214403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2024-11-21T23:15:34.216660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T23:15:34.219480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:34.219643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:34.219726Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2024-11-21T23:15:34.219903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2024-11-21T23:15:34.220092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2024-11-21T23:15:34.220152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T23:15:34.220670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T23:15:34.222368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:34.222427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:34.222609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2024-11-21T23:15:34.222688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:34.222749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:479:2440], at schemeshard: 72057594046678944, txId: 129, path id: 1 2024-11-21T23:15:34.222787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:479:2440], at schemeshard: 72057594046678944, txId: 129, path id: 5 FAKE_COORDINATOR: Erasing txId 129 2024-11-21T23:15:34.223086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2024-11-21T23:15:34.223128Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2024-11-21T23:15:34.223247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2024-11-21T23:15:34.223319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T23:15:34.223377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2024-11-21T23:15:34.223423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T23:15:34.223478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2024-11-21T23:15:34.223517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2024-11-21T23:15:34.223613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T23:15:34.223659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2024-11-21T23:15:34.223708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2024-11-21T23:15:34.223742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2024-11-21T23:15:34.224459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:15:34.224537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:15:34.224583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2024-11-21T23:15:34.224622Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2024-11-21T23:15:34.224664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2024-11-21T23:15:34.225529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 
4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:15:34.225635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:15:34.225672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2024-11-21T23:15:34.225708Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2024-11-21T23:15:34.225736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T23:15:34.225807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2024-11-21T23:15:34.229897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T23:15:34.230301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:32.873571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:32.873669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:32.873720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:32.873758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:32.873806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:32.873830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:32.873884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:32.874199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:33.034990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:33.035060Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:33.050812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:33.055178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:33.055416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2024-11-21T23:15:33.062643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:33.063277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:33.063942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.064257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:33.068864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.070148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.070217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.070506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:33.070562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.070609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:33.070715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.077487Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:33.228728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:33.228977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.229181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:33.229417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:33.229488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.232900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.233076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:33.233302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.233377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:33.233429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:33.233464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:33.236471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.236538Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:33.236580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:33.238422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.238476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.238530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.238589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.242710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:33.244772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:33.244991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:33.246094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:33.246237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:33.246292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.246624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:33.246677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:33.246864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.246983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:33.249021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:33.249079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:33.249259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:33.249318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:33.249711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:33.249766Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:33.249867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:33.249902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.249963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:33.250023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:33.250072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:33.250117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:33.250191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:33.250227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:33.250264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:33.252115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.252317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:33.252358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:33.252420Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:33.252476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:33.252591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2024-11-21T23:15:34.158038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:15:34.158376Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:34.158553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 8589936745 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:34.158628Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2024-11-21T23:15:34.158802Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T23:15:34.159024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:34.159105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:34.159165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:15:34.160095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:15:34.162475Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:34.162522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:34.162692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:15:34.162791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:15:34.162853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:15:34.162965Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:34.163004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:15:34.163049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T23:15:34.163077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2024-11-21T23:15:34.163142Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T23:15:34.163503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:15:34.163561Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:15:34.163699Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:15:34.163743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:34.163813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:15:34.163867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:34.163909Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:15:34.163948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:15:34.164042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:15:34.164085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:15:34.164127Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2024-11-21T23:15:34.164195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T23:15:34.164245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T23:15:34.164267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T23:15:34.165405Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:34.165502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:34.165542Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:34.165614Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T23:15:34.165685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:15:34.167241Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:34.167327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:34.167358Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:34.167392Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T23:15:34.167436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:34.168107Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:34.168181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:34.168212Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:34.168241Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T23:15:34.168271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:15:34.168335Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:15:34.170295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:15:34.172248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:15:34.172330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:15:34.172546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T23:15:34.172596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T23:15:34.173037Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T23:15:34.173126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:15:34.173168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:332:2324] TestWaitNotification: OK eventTxId 101 2024-11-21T23:15:34.173675Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:34.173886Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 262us result status StatusSuccess 2024-11-21T23:15:34.174205Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 
PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldHandleReadAck [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> TExternalTableTest::DropTableTwice >> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> DataShardReadIteratorSysTables::ShouldRead [GOOD] >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive >> DataShardReadIterator::ShouldReadRangeCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::ReplaceExternalTableIfNotExists >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> DataShardReadIteratorBatchMode::RangeFull [GOOD] >> DataShardReadIteratorBatchMode::RangeToInclusive >> KqpMultishardIndex::DataColumnUpsertMixedSemantic-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is 
[1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T23:15:29.318101Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:29.324227Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:29.326385Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:29.331258Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:29.332109Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:29.332158Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:29.332391Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:29.345136Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:29.345271Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:29.345411Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:29.345515Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:29.345636Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:29.345728Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:229:2066] recipient: [1:20:2067] 2024-11-21T23:15:29.357249Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:29.357433Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:29.369984Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:29.370148Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:29.370236Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:29.370336Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:29.370516Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:29.370612Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:29.370671Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:29.370767Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:29.381684Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:29.381828Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:29.382831Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:29.382876Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:29.382987Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:29.397784Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T23:15:29.399421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T23:15:29.400189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T23:15:31.353035Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:31.354031Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:31.356132Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:31.356606Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:31.357301Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:31.357344Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:31.357548Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:31.375805Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:31.375997Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:31.376185Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:31.376315Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:31.376434Z node 11 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:31.376502Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:229:2066] recipient: [11:20:2067] 2024-11-21T23:15:31.388762Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:31.388988Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:31.401142Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:31.401314Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:31.401417Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:31.401512Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:31.401682Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:31.401790Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:31.401845Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:31.401905Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:31.415105Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:31.415255Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:31.416556Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:31.416621Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:31.416742Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:31.417171Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T23:15:31.418148Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2024-11-21T23:15:31.418876Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:204:2066] recipient: [21:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:204:2066] recipient: [21:183:2075] Leader for TabletID 72057594037932033 is [21:206:2077] sender: [21:207:2066] recipient: [21:183:2075] 2024-11-21T23:15:32.873640Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:32.874717Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:32.876341Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:32.876803Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:32.877443Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NAct ... 01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2024-11-21T23:15:32.937660Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2024-11-21T23:15:32.938305Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2024-11-21T23:15:32.943644Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2024-11-21T23:15:32.944333Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2024-11-21T23:15:32.945040Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2024-11-21T23:15:32.945657Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2024-11-21T23:15:32.946321Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2024-11-21T23:15:32.963361Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2024-11-21T23:15:32.964345Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2024-11-21T23:15:32.965082Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2024-11-21T23:15:32.965856Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2024-11-21T23:15:32.966577Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 
2024-11-21T23:15:32.967323Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2024-11-21T23:15:32.968092Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2024-11-21T23:15:32.968914Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2024-11-21T23:15:32.969584Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2024-11-21T23:15:32.970366Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:204:2066] recipient: [31:188:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:204:2066] recipient: [31:188:2075] Leader for TabletID 72057594037932033 is [31:206:2077] sender: [31:207:2066] recipient: [31:188:2075] 2024-11-21T23:15:34.584313Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:34.585328Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:34.587023Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:34.587478Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:34.588111Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:34.588152Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:34.588378Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:34.597885Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:34.598030Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:34.598142Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:34.598250Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:34.598438Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:34.598539Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:206:2077] sender: [31:229:2066] recipient: [31:20:2067] 2024-11-21T23:15:34.610379Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:34.610574Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:34.622196Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:34.622351Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:34.622453Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:34.622539Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:34.622660Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:34.622712Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:34.622759Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:34.622825Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:34.634228Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:34.634476Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:34.635721Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:34.635782Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:34.635911Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:34.636450Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2024-11-21T23:15:34.637438Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2024-11-21T23:15:34.638062Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2024-11-21T23:15:34.638717Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2024-11-21T23:15:34.639322Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2024-11-21T23:15:34.640004Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2024-11-21T23:15:34.640652Z node 31 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2024-11-21T23:15:34.641333Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2024-11-21T23:15:34.641903Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2024-11-21T23:15:34.642643Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2024-11-21T23:15:34.643399Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2024-11-21T23:15:34.644155Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2024-11-21T23:15:34.644769Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2024-11-21T23:15:34.645457Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2024-11-21T23:15:34.646084Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2024-11-21T23:15:34.646811Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2024-11-21T23:15:34.647500Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2024-11-21T23:15:34.648326Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2024-11-21T23:15:34.649010Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2024-11-21T23:15:34.649737Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } >> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD] >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> TExternalTableTest::DropTableTwice [GOOD] >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL >> KqpUniqueIndex::UpdateOnNullInComplexFk >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> LabeledDbCounters::TwoTablets >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropTableTwice [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:36.936282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:36.936391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:36.936451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:36.936510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:36.936573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:36.936602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:36.936661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:36.937042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:37.043065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:37.043138Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:37.083485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:37.087962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:37.088219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:37.124597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:37.134755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:37.135553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:37.136054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:37.163914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:37.165403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:37.165483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:37.165738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:37.165785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:37.165823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:37.165921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.182930Z 
node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:37.379093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:37.379344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.379595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:37.379847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:37.379919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.384132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:37.384318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:37.384541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.384610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:37.384668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:37.384702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:37.391327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.391403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:37.391476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:37.399397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.399493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.399544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:37.399597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:37.406674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:37.415225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:37.415500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:37.416654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:37.416822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:37.416868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:37.417195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:37.417247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:37.417456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:37.417561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:37.423587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:37.423652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:37.423867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:37.423918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:37.424326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.424381Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:37.424477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:37.424509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:37.424554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:37.424595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:37.424658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:37.424690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:37.424778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T23:15:37.424818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:37.424851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:37.426983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:37.427103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:37.427140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:37.427189Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:37.427242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:37.427361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T23:15:37.569619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2024-11-21T23:15:37.570305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:37.570446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:37.570517Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T23:15:37.570696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:37.570777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2024-11-21T23:15:37.570964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:37.571041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:15:37.571076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:15:37.571814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:15:37.572944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T23:15:37.574718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:37.574752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:37.574897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:15:37.575005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:15:37.575127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:37.575168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 2024-11-21T23:15:37.575216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T23:15:37.575251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2024-11-21T23:15:37.575591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.575638Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T23:15:37.575739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T23:15:37.575771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:15:37.575813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T23:15:37.575854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:15:37.575890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:15:37.575972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:15:37.576058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:37.576135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:15:37.576198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2024-11-21T23:15:37.576237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2024-11-21T23:15:37.576275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T23:15:37.576301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2024-11-21T23:15:37.576681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 
Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:37.576756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:37.576794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:15:37.576839Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2024-11-21T23:15:37.576878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:15:37.577367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:15:37.577416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:15:37.577475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:15:37.577855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:37.577935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:37.577961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:15:37.577997Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T23:15:37.578027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:37.578786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:37.578862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:37.578887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:15:37.579009Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T23:15:37.579038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:15:37.579124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T23:15:37.582714Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:15:37.582954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:15:37.583580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:15:37.584108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:15:37.584403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:15:37.584449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:15:37.584913Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:15:37.585008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:15:37.585056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:366:2358] TestWaitNotification: OK eventTxId 103 2024-11-21T23:15:37.585570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:37.585762Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 219us result status StatusPathDoesNotExist 2024-11-21T23:15:37.586131Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:37.607154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:37.607244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:37.607281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:37.607343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:37.607403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:37.607434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:37.607538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:37.607864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:37.696727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:37.696786Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:37.719868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:37.728192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:37.728394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:37.742482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:37.744400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:37.746681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:37.747111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:37.754123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:37.755501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:37.755568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:37.755830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:37.755883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:37.755920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:37.756016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.770576Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:37.949456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2024-11-21T23:15:37.949723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.949949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:37.950197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:37.950272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.963409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:37.963619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:37.963846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.963914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:37.963972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:37.964011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:37.974716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.974815Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:37.974860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:37.976989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.977044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.977087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:37.977137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:37.980772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:37.985653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:37.985886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:37.986948Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:37.987086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:37.987132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:37.987485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:37.987551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:37.987738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:37.987852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:37.990315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:37.990383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:37.990570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:37.990614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:37.990998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:37.991055Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:37.991163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:37.991201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:37.991253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:37.991290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:37.991329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:37.991356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:37.991432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:37.991484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:37.991518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:37.993315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:37.993421Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:37.993454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:37.993499Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:37.993542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:37.993635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... : 72057594046678944, LocalPathId: 2] 2024-11-21T23:15:38.056808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:15:38.056902Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:38.056935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:15:38.056975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T23:15:38.056999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:15:38.057302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.057343Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:15:38.057451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:15:38.057490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:38.057562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:15:38.057609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:15:38.057646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:15:38.057681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:15:38.057774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:15:38.057817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T23:15:38.057855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T23:15:38.057878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T23:15:38.059087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
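[editor's note] The records above show a recurring pattern: the schemeshard announces "Publication still in progress, tx: N, publications: K", then each TEvUpdateAck for a (pathId, version) pair decrements "Publication in-flight, count" until "Publication complete, notify & remove" is logged. The sketch below is a toy model of that countdown for readers following the log; the class and method names are assumptions made for illustration and are not the actual schemeshard code.

# Illustrative only: a toy model of the publication/ack countdown visible in the log.
from dataclasses import dataclass, field


@dataclass
class TxPublication:
    tx_id: int
    # (owner_id, local_path_id) -> expected version still awaiting an ack
    pending: dict = field(default_factory=dict)

    def expect(self, owner_id: int, local_path_id: int, version: int) -> None:
        self.pending[(owner_id, local_path_id)] = version

    def ack(self, owner_id: int, local_path_id: int, version: int) -> bool:
        """Handle one update ack; True once the last pending path is acked."""
        key = (owner_id, local_path_id)
        if key in self.pending and version >= self.pending[key]:
            del self.pending[key]
        return not self.pending


pub = TxPublication(tx_id=101)
pub.expect(72057594046678944, 1, 5)
pub.expect(72057594046678944, 2, 2)
assert not pub.ack(72057594046678944, 1, 5)  # in-flight count drops 2 -> 1
assert pub.ack(72057594046678944, 2, 2)      # 1 -> 0: publication complete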
2024-11-21T23:15:38.059174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:38.059205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:38.059249Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:15:38.059286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:38.060765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:38.060848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:15:38.060877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:15:38.060902Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T23:15:38.060933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:15:38.061001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:15:38.064808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:15:38.065848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2024-11-21T23:15:38.066073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T23:15:38.066140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T23:15:38.066556Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T23:15:38.066649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:15:38.066694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:302:2294] TestWaitNotification: OK eventTxId 101 2024-11-21T23:15:38.067190Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:38.067409Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 265us result status StatusSuccess 2024-11-21T23:15:38.067763Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T23:15:38.083352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:38.083779Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2024-11-21T23:15:38.083858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2024-11-21T23:15:38.083901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2024-11-21T23:15:38.086502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:38.086671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, 
operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:15:38.086983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:15:38.087069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:15:38.087488Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:15:38.087607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:15:38.087642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:310:2302] TestWaitNotification: OK eventTxId 102 2024-11-21T23:15:38.088085Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:38.088243Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 171us result status StatusPathDoesNotExist 2024-11-21T23:15:38.088412Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:37.850275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:37.850369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
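[editor's note] The test output above ends with a CreateExternalTable request that sets ReplaceIfExists: true being rejected with StatusPreconditionFailed because the EnableReplaceIfExistsForExternalEntities feature flag is off, after which the describe of /MyRoot/ExternalTable confirms the path was never created. The following is a minimal sketch of that precondition check, written only to mirror the behavior the log shows; the function name and dict-based request shape are assumptions, not YDB source.

# Minimal sketch (not YDB source) of the guard visible in the log above.
def validate_create_external_table(request: dict, feature_flags: dict) -> tuple[str, str]:
    # ReplaceIfExists is only honored when the corresponding feature flag is on.
    if request.get("ReplaceIfExists") and not feature_flags.get(
        "EnableReplaceIfExistsForExternalEntities", False
    ):
        return (
            "StatusPreconditionFailed",
            "Invalid TCreateExternalTable request: Unsupported: "
            "feature flag EnableReplaceIfExistsForExternalEntities is off",
        )
    return "StatusAccepted", ""


status, reason = validate_create_external_table(
    {"Name": "ExternalTable", "ReplaceIfExists": True},
    {"EnableReplaceIfExistsForExternalEntities": False},
)
assert status == "StatusPreconditionFailed"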
2024-11-21T23:15:37.850448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:37.850500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:37.850565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:37.850596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:37.850672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:37.851032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:37.924935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:37.925001Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:37.955635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:37.960372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:37.960612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:37.979712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:37.987405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:37.988189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:37.988630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:38.011840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:38.013272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:38.013341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:38.013579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:38.013629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:38.013676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:38.013777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.056312Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:38.222522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:38.222777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.223044Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:38.223283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:38.223349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.231624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:38.231837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:38.232070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.232129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:38.232203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:38.232246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:38.235614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.235698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:38.235759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:38.243713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.243795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.243844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:38.243903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:38.260718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:38.275582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:38.275888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:38.277203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:38.277382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:38.277435Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:38.277828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:38.277909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:38.278143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:38.278250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:38.287968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:38.288059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:38.288292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:38.288342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:38.288846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.288920Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:38.289030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:38.289067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:38.289132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:38.289316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:38.289386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:38.289419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:38.289516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:38.289563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:38.289659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:38.292764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:38.292920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
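[editor's note] The setup records here walk each operation through its state transitions ("Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240") and report describe-path latencies ("took 219us"). A small helper like the one below can be run over a report such as this one to pull both out; it only relies on substrings that appear verbatim in the log above, and everything else is an assumption for illustration.

# Self-contained helper: extract state transitions and describe-path latencies.
import re

STATE_RE = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")
TOOK_RE = re.compile(r'describe path "([^"]+)" took (\d+)us')


def summarize(log_text: str) -> None:
    for tx, src, dst in STATE_RE.findall(log_text):
        print(f"tx {tx}: state {src} -> {dst}")
    for path, us in TOOK_RE.findall(log_text):
        print(f"describe {path}: {us}us")


summarize(
    '... INFO: Change state for txid 1:0 128 -> 240 ... '
    'Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 219us ...'
)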
2024-11-21T23:15:38.292975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:38.293040Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:38.293086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:38.293214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... : Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:15:38.474853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2024-11-21T23:15:38.474896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:15:38.476308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:38.476413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:38.476446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:15:38.476480Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:15:38.476523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:15:38.476616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2024-11-21T23:15:38.478729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T23:15:38.478900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2024-11-21T23:15:38.479496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:38.479636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:38.479712Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 103:0 HandleReply TEvOperationPlan: step# 5000004 2024-11-21T23:15:38.479848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2024-11-21T23:15:38.480015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:38.480078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:38.481193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:15:38.482921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T23:15:38.484417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:38.484454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:38.484631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:15:38.484730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:15:38.484847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:38.484894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 2024-11-21T23:15:38.484932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T23:15:38.484955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T23:15:38.485267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:15:38.485324Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T23:15:38.485434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T23:15:38.485468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:15:38.485515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T23:15:38.485562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:15:38.485595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:15:38.485627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:15:38.485724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:15:38.485783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:15:38.485828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T23:15:38.485863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 
9 2024-11-21T23:15:38.485892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2024-11-21T23:15:38.487237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:38.487330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:38.487366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:15:38.487402Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2024-11-21T23:15:38.487461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:15:38.488724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:38.488802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:15:38.488828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:15:38.488887Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2024-11-21T23:15:38.488919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:38.488991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T23:15:38.492632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:15:38.493031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2024-11-21T23:15:38.493249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:15:38.493289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:15:38.493767Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:15:38.493891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:15:38.493928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:361:2353] TestWaitNotification: OK eventTxId 103 2024-11-21T23:15:38.494478Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: 
false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:38.494745Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 275us result status StatusSuccess 2024-11-21T23:15:38.495090Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DbCounters::TabletsSimple >> SystemView::QueryStatsRetries >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD] >> DataShardReadIterator::ShouldReadRangeReverse |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |85.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:15:29.804372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:15:29.804462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:29.804530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:15:29.804571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:15:29.804647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:15:29.804685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:15:29.804747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:15:29.805077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:29.889539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:29.889598Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:29.903036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:29.906732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:15:29.906946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:15:29.913387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:15:29.914017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:15:29.914679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:29.914995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:15:29.922339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:29.923879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:29.923949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:29.924168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:15:29.924220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:29.924275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:15:29.924361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:15:29.931351Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:15:30.050372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:15:30.050650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:30.050902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:15:30.051152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:15:30.051216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:30.057799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:30.057995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:15:30.058245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:30.058326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:15:30.058370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:15:30.058431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:15:30.063716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:30.063790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:30.063851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:15:30.072235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:30.072304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:30.072354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:30.072403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:15:30.076332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:15:30.079467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:15:30.079736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:15:30.081047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:15:30.081219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:15:30.081273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:30.081788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:15:30.081858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:15:30.082072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:30.082248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:15:30.085375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:15:30.085429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:15:30.085639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:15:30.085686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:15:30.086116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:15:30.086169Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:15:30.086797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:15:30.086866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:30.086926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:15:30.086998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:15:30.087043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:15:30.087079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:15:30.087167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:15:30.087213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:15:30.087254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:15:30.089839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 
3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:30.089975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:15:30.090019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:15:30.090078Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:15:30.090125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:15:30.090246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 46678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:40.662057Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:15:40.662091Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:15:40.662128Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T23:15:40.662168Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:15:40.662284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2024-11-21T23:15:40.662320Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:15:40.667246Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.667322Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2024-11-21T23:15:40.667486Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:335:2315] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T23:15:40.668112Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T23:15:40.668167Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T23:15:40.668228Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2024-11-21T23:15:40.668290Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:40.668700Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:15:40.668899Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:15:40.668952Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2024-11-21T23:15:40.669007Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2024-11-21T23:15:40.669114Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2024-11-21T23:15:40.669813Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.669860Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2024-11-21T23:15:40.669950Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:337:2316] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T23:15:40.675585Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2024-11-21T23:15:40.675656Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2024-11-21T23:15:40.675730Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:15:40.675786Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:15:40.676164Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:15:40.676312Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2024-11-21T23:15:40.676353Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2024-11-21T23:15:40.676389Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2024-11-21T23:15:40.676444Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2024-11-21T23:15:40.676544Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:418:2373] message: TxId: 102 2024-11-21T23:15:40.676617Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2024-11-21T23:15:40.676692Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:15:40.676746Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:15:40.676890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:15:40.676953Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2024-11-21T23:15:40.676986Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2024-11-21T23:15:40.677033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:15:40.677065Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2024-11-21T23:15:40.677091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2024-11-21T23:15:40.677142Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:15:40.677177Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2024-11-21T23:15:40.677201Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 102:3 2024-11-21T23:15:40.677258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T23:15:40.682837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:40.682912Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.683209Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:40.683242Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.683416Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435080, Sender [7:123:2149], Recipient [7:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2024-11-21T23:15:40.683490Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2024-11-21T23:15:40.683590Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:15:40.683669Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T23:15:40.683789Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:15:40.684821Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:40.684866Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.684918Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:40.684979Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.685046Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:40.685074Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.685174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:15:40.685206Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.699722Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.699911Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2024-11-21T23:15:40.700049Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:418:2373] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2024-11-21T23:15:40.700238Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:15:40.700346Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:510:2464] 2024-11-21T23:15:40.700610Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at 
schemeshard: 72057594046678944 2024-11-21T23:15:40.700794Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:512:2466], Recipient [7:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:15:40.700845Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:15:40.700883Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2024-11-21T23:15:40.701508Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:587:2541], Recipient [7:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2024-11-21T23:15:40.701589Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T23:15:40.701733Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:15:40.702013Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 285us result status StatusPathDoesNotExist 2024-11-21T23:15:40.702224Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SystemView::PgTablesOneSchemeShardDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 
4095 65535 8191 65535 16383 65535 32767 65535 65535 |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> TTopicApiDescribes::DescribeConsumer [GOOD] |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |85.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> SystemView::VSlotsFields >> BasicStatistics::TwoServerlessDbs [GOOD] >> SystemView::TopPartitionsFields |85.4%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SystemView::TabletsFields >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization >> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion >> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite >> SystemView::Nodes [GOOD] >> SystemView::PDisksFields >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldNotReadAfterCancel >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2024-11-21T23:15:26.616838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875178201065629:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:26.616884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:26.682586Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875177108961708:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:26.683012Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:26.850089Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:26.858261Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024c5/r3tmp/tmpZ2WDqr/pdisk_1.dat 2024-11-21T23:15:27.096775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:27.096934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:27.100500Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:15:27.101317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:27.131143Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:27.136955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:27.137032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15065, node 1 2024-11-21T23:15:27.142639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:27.149097Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:27.149139Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:27.267012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0024c5/r3tmp/yandexxCa9rs.tmp 2024-11-21T23:15:27.267068Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0024c5/r3tmp/yandexxCa9rs.tmp 2024-11-21T23:15:27.270425Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0024c5/r3tmp/yandexxCa9rs.tmp 2024-11-21T23:15:27.270572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:27.316972Z INFO: TTestServer started on Port 15415 GrpcPort 15065 TClient is connected to server localhost:15415 PQClient connected to localhost:15065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:27.672603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:15:27.745720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:15:30.454209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875194288831036:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:30.454293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875194288831068:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:30.454453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:30.464526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:15:30.499037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875194288831073:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:15:31.081498Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439875194288831116:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:31.081638Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875195380935966:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:31.083555Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTYwZjA1MDEtOTU1Yjc4OWYtNmI0M2Q0MTQtZjA2YThhOGQ=, ActorId: [2:7439875194288831033:2279], ActorState: ExecuteState, TraceId: 01jd8g4r0gf0fhrrjf6edzr3gq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:31.085791Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:31.086995Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjMwZjI1YTEtMmQ2YmZhMTMtZDY5NjFkNjctOGM3Mzg4YWI=, ActorId: [1:7439875195380935912:2305], ActorState: ExecuteState, TraceId: 01jd8g4r4xakzzra5br3e0m7h9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:31.087427Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:31.090868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:31.289000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:31.479068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:31.617762Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875178201065629:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:31.617845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:31.667024Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875177108961708:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:31.667086Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T23:15:32.010122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8g4sa53btbyknw1888vztd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc2Yzk0OTItZTlhZTc4NC02MWRlOWRmYi1kYWJhMDMyYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439875203970870930:3072] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2024-11-21T23:15:37.150676Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2024-11-21T23:15:37.690830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jd8g4yjfcz7t5kc35t7y15yh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUzMjA4NjQtZWMzOTM1NGMtOTY1NjQ1YTktNDcxYWUxYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:37.778200Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billi ... 
onsumer" include_location: true 2024-11-21T23:15:39.925633Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875234035643742:2556]: Request location 2024-11-21T23:15:39.926101Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875234035643744:2557] connected; active server actors: 1 2024-11-21T23:15:39.926332Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 2 2024-11-21T23:15:39.926361Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 2, Generation 2 2024-11-21T23:15:39.926378Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 1, Generation 2 2024-11-21T23:15:39.926409Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 2 2024-11-21T23:15:39.926432Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 2, Generation 2 2024-11-21T23:15:39.926449Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 2, Generation 2 2024-11-21T23:15:39.926462Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 2, Generation 2 2024-11-21T23:15:39.926476Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 1, Generation 2 2024-11-21T23:15:39.926489Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 1, Generation 2 2024-11-21T23:15:39.926499Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 1, Generation 2 2024-11-21T23:15:39.926511Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 2, Generation 2 2024-11-21T23:15:39.926524Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 2, Generation 2 2024-11-21T23:15:39.926534Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 1, Generation 2 2024-11-21T23:15:39.926544Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 1, Generation 2 2024-11-21T23:15:39.926556Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 2, Generation 2 2024-11-21T23:15:39.926586Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875234035643742:2556]: Got location 2024-11-21T23:15:39.927431Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875234035643744:2557] disconnected; active server actors: 1 
2024-11-21T23:15:39.927469Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875234035643744:2557] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1732230938004 tx_id: 281474976710676 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 2 generation: 2 } } } } } 2024-11-21T23:15:39.932822Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2024-11-21T23:15:39.932974Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1732230938004 tx_id: 281474976710676 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2024-11-21T23:15:39.943647Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2024-11-21T23:15:39.943729Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { 
message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } 2024-11-21T23:15:40.419658Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439875238330611076:2560], TxId: 281474976710683, task: 1, CA Id [1:7439875238330611074:2560]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2024-11-21T23:15:40.455491Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439875238330611076:2560], TxId: 281474976710683, task: 1, CA Id [1:7439875238330611074:2560]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:15:40.506490Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439875238330611076:2560], TxId: 281474976710683, task: 1, CA Id [1:7439875238330611074:2560]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:15:40.573900Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439875238330611076:2560], TxId: 281474976710683, task: 1, CA Id [1:7439875238330611074:2560]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:15:40.669821Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439875238330611076:2560], TxId: 281474976710683, task: 1, CA Id [1:7439875238330611074:2560]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:15:40.827237Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439875238330611076:2560], TxId: 281474976710683, task: 1, CA Id [1:7439875238330611074:2560]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:15:41.123060Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439875238330611076:2560], TxId: 281474976710683, task: 1, CA Id [1:7439875238330611074:2560]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2024-11-21T23:15:41.387210Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710684. Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:15:41.387370Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7439875242625578448:2561] TxId: 281474976710684. Ctx: { TraceId: 01jd8g524b7t1wsfcftns8qkq4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg0ZDk5NDYtMmY4YWE1ZjEtZDg1MGVjMy03OTdlMDdhMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2024-11-21T23:15:41.395720Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzg0ZDk5NDYtMmY4YWE1ZjEtZDg1MGVjMy03OTdlMDdhMQ==, ActorId: [1:7439875238330611120:2561], ActorState: ExecuteState, TraceId: 01jd8g524b7t1wsfcftns8qkq4, Create QueryResponse for error on request, msg: 2024-11-21T23:15:41.415928Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jd8g52gferrym3yyz7xt97we" } } YdbStatus: UNAVAILABLE ConsumedRu: 273 } 2024-11-21T23:15:41.622516Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439875238330611076:2560], TxId: 281474976710683, task: 1, CA Id [1:7439875238330611074:2560]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 >> TConsoleTests::TestRemoveServerlessTenant >> BsControllerConfig::MoveGroups [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TIcNodeCache::GetNodesInfoTest [GOOD] >> SystemView::PartitionStatsOneSchemeShard >> SystemView::ConcurrentScans ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2967:2106] recipient: [1:2845:2115] 2024-11-21T23:15:28.508890Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:28.513811Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:28.516083Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:28.516512Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:28.517209Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:28.517244Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:28.517493Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:28.528848Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:28.528984Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:28.529106Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:28.529194Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:28.529280Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:28.529336Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 
72057594037932033 is [1:2966:2117] sender: [1:2992:2106] recipient: [1:60:2107] 2024-11-21T23:15:28.541822Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:28.541981Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:28.553448Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:28.553566Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:28.553655Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:28.553742Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:28.553865Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:28.553930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:28.553961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:28.554005Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:28.564672Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:28.564806Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:28.565973Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:28.566017Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:28.566117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:28.584815Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 
} HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 100 PDiskFilter { Property { Type: ROT } } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "second storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } } 2024-11-21T23:15:28.586273Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 50:1000 Path# /dev/disk2 2024-11-21T23:15:28.586334Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 50:1001 Path# /dev/disk1 2024-11-21T23:15:28.586358Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1000 Path# /dev/disk3 2024-11-21T23:15:28.586383Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1001 Path# /dev/disk2 2024-11-21T23:15:28.586428Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 49:1002 Path# /dev/disk1 
2024-11-21T23:15:28.586456Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 48:1000 Path# /dev/disk2 2024-11-21T23:15:28.586481Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 48:1001 Path# /dev/disk1 2024-11-21T23:15:28.586536Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 47:1000 Path# /dev/disk2 2024-11-21T23:15:28.586575Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 47:1001 Path# /dev/disk1 2024-11-21T23:15:28.586614Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 46:1000 Path# /dev/disk2 2024-11-21T23:15:28.586642Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 46:1001 Path# /dev/disk1 2024-11-21T23:15:28.586663Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 45:1000 Path# /dev/disk2 2024-11-21T23:15:28.586683Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 45:1001 Path# /dev/disk1 2024-11-21T23:15:28.586703Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 44:1000 Path# /dev/disk2 2024-11-21T23:15:28.586741Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 44:1001 Path# /dev/disk1 2024-11-21T23:15:28.586765Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 43:1000 Path# /dev/disk2 2024-11-21T23:15:28.586787Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 43:1001 Path# /dev/disk1 2024-11-21T23:15:28.586807Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 42:1000 Path# /dev/disk2 2024-11-21T23:15:28.586831Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 42:1001 Path# /dev/disk1 2024-11-21T23:15:28.586850Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 41:1000 Path# /dev/disk2 2024-11-21T23:15:28.586883Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 41:1001 Path# /dev/disk1 2024-11-21T23:15:28.586910Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 40:1000 Path# /dev/disk2 2024-11-21T23:15:28.586935Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 40:1001 Path# /dev/disk1 2024-11-21T23:15:28.586971Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisk ... 
0:1000 Path# /dev/disk2 2024-11-21T23:15:37.797897Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1000 Path# /dev/disk3 2024-11-21T23:15:37.797935Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1001 Path# /dev/disk1 2024-11-21T23:15:37.797966Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1000 Path# /dev/disk1 2024-11-21T23:15:37.797990Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1000 Path# /dev/disk1 2024-11-21T23:15:37.798014Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 70:1002 Path# /dev/disk2 2024-11-21T23:15:37.798036Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1000 Path# /dev/disk3 2024-11-21T23:15:37.798060Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1001 Path# /dev/disk3 2024-11-21T23:15:37.798084Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1001 Path# /dev/disk2 2024-11-21T23:15:37.798105Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 67:1002 Path# /dev/disk1 2024-11-21T23:15:37.798127Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1000 Path# /dev/disk2 2024-11-21T23:15:37.798147Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1001 Path# /dev/disk3 2024-11-21T23:15:37.798172Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1000 Path# /dev/disk2 2024-11-21T23:15:37.798195Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 68:1002 Path# /dev/disk2 2024-11-21T23:15:37.798217Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 62:1001 Path# /dev/disk3 2024-11-21T23:15:37.798238Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1000 Path# /dev/disk3 2024-11-21T23:15:37.798262Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1001 Path# /dev/disk1 2024-11-21T23:15:37.798289Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1002 Path# /dev/disk2 2024-11-21T23:15:37.798313Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1000 Path# /dev/disk3 2024-11-21T23:15:37.798335Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1001 Path# /dev/disk3 2024-11-21T23:15:37.798357Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1001 Path# /dev/disk2 2024-11-21T23:15:37.798379Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 64:1002 Path# /dev/disk1 2024-11-21T23:15:37.805486Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 69:1002 Path# /dev/disk2 2024-11-21T23:15:37.805556Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 63:1001 Path# /dev/disk3 2024-11-21T23:15:37.805584Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 62:1002 Path# /dev/disk1 
2024-11-21T23:15:37.805614Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 63:1002 Path# /dev/disk1 2024-11-21T23:15:37.805643Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1001 Path# /dev/disk3 2024-11-21T23:15:37.805672Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 65:1002 Path# /dev/disk2 2024-11-21T23:15:37.805700Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 57:1001 Path# /dev/disk1 2024-11-21T23:15:37.805727Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1002 Path# /dev/disk2 2024-11-21T23:15:37.805754Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1001 Path# /dev/disk3 2024-11-21T23:15:37.805781Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 60:1001 Path# /dev/disk1 2024-11-21T23:15:37.805807Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 61:1002 Path# /dev/disk1 2024-11-21T23:15:37.805834Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1002 Path# /dev/disk2 2024-11-21T23:15:37.805893Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 60:1002 Path# /dev/disk3 2024-11-21T23:15:37.805934Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2024-11-21T23:15:37.805959Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 66:1002 Path# /dev/disk1 2024-11-21T23:15:37.805988Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1002 Path# /dev/disk2 2024-11-21T23:15:37.806013Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 59:1001 Path# /dev/disk3 2024-11-21T23:15:37.806040Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 58:1001 Path# /dev/disk1 2024-11-21T23:15:37.806065Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 59:1002 Path# /dev/disk1 2024-11-21T23:15:37.806091Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1002 Path# /dev/disk2 2024-11-21T23:15:37.806116Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 58:1002 Path# /dev/disk3 2024-11-21T23:15:37.806144Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 53:1001 Path# /dev/disk1 2024-11-21T23:15:37.806175Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 56:1001 Path# /dev/disk1 2024-11-21T23:15:37.806221Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1002 Path# /dev/disk2 2024-11-21T23:15:37.806258Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 56:1002 Path# /dev/disk3 2024-11-21T23:15:37.806285Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1001 Path# /dev/disk1 2024-11-21T23:15:37.806310Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1001 Path# /dev/disk3 2024-11-21T23:15:37.806336Z node 51 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1000 Path# /dev/disk1 2024-11-21T23:15:37.806364Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1002 Path# /dev/disk1 2024-11-21T23:15:37.806413Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1001 Path# /dev/disk3 2024-11-21T23:15:37.806443Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1000 Path# /dev/disk1 2024-11-21T23:15:37.806468Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1001 Path# /dev/disk2 2024-11-21T23:15:37.806494Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1002 Path# /dev/disk2 2024-11-21T23:15:37.806526Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1001 Path# /dev/disk3 2024-11-21T23:15:37.806554Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1002 Path# /dev/disk1 2024-11-21T23:15:37.806581Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 54:1002 Path# /dev/disk2 2024-11-21T23:15:37.806608Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1002 Path# /dev/disk2 2024-11-21T23:15:37.806636Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 57:1002 Path# /dev/disk3 2024-11-21T23:15:37.806660Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1000 Path# /dev/disk2 2024-11-21T23:15:37.806688Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 53:1002 Path# /dev/disk3 2024-11-21T23:15:37.806714Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 52:1001 Path# /dev/disk1 2024-11-21T23:15:37.806744Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 52:1002 Path# /dev/disk3 2024-11-21T23:15:37.806776Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1002 Path# /dev/disk1 2024-11-21T23:15:37.806803Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1002 Path# /dev/disk3 2024-11-21T23:15:37.806854Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1001 Path# /dev/disk3 2024-11-21T23:15:37.806894Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1002 Path# /dev/disk2 2024-11-21T23:15:37.806923Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 55:1002 Path# /dev/disk3 2024-11-21T23:15:37.806948Z node 51 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 51:1002 Path# /dev/disk1 2024-11-21T23:15:38.081614Z node 51 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.287742s 2024-11-21T23:15:38.081807Z node 51 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.287957s 2024-11-21T23:15:38.101657Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T23:15:38.179891Z node 51 
:BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2024-11-21T23:15:38.197327Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T23:15:38.312534Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2024-11-21T23:15:38.331856Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2024-11-21T23:15:38.472705Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2024-11-21T23:15:38.499666Z node 51 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2024-11-21T23:12:28.626210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:28.626566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:28.626658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002701/r3tmp/tmpoimfRs/pdisk_1.dat 2024-11-21T23:12:29.096379Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28191, node 1 2024-11-21T23:12:29.574141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:29.574213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:29.574268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:29.578948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:12:29.628621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:12:29.756379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:29.756518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:29.783412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5961 2024-11-21T23:12:30.626587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:12:35.102627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:35.102764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:35.163255Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:12:35.178316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:35.534694Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:35.599850Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:12:35.599968Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:12:35.640805Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:12:35.641989Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:12:35.642263Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:12:35.642343Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:12:35.642489Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:12:35.642545Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:12:35.642605Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:12:35.642667Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:12:35.643115Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:12:35.943741Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:12:35.943875Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1758:2550], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:12:35.948949Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1765:2556] 2024-11-21T23:12:35.958736Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1797:2574] 2024-11-21T23:12:35.959953Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1797:2574], schemeshard id = 72075186224037889 2024-11-21T23:12:35.962722Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Shared 2024-11-21T23:12:36.050017Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:12:36.050096Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:12:36.050170Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2024-11-21T23:12:36.077197Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:36.077317Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:36.106143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:12:36.124725Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:12:36.124894Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:12:36.180594Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:12:36.200395Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:36.243843Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:12:36.674564Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:12:36.862263Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Column diff is empty, finishing 2024-11-21T23:12:37.696012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:12:38.600892Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:38.830955Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2024-11-21T23:12:38.831043Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037899 2024-11-21T23:12:38.831156Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2490:2904], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037899 2024-11-21T23:12:38.832714Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2491:2905] 2024-11-21T23:12:38.832965Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2491:2905], schemeshard id = 72075186224037899 2024-11-21T23:12:39.891006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:12:40.547556Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:41.089120Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2024-11-21T23:12:41.089196Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037905 2024-11-21T23:12:41.089307Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2982:3110], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037905 2024-11-21T23:12:41.092677Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2988:3112] 2024-11-21T23:12:41.120684Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:2988:3112], schemeshard id = 72075186224037905 2024-11-21T23:12:42.880956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3106:3361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.881139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:42.908917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2024-11-21T23:12:43.311992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3257:3397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:43.312488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:43.314270Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3262:3401]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:12:43.314641Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:12:43.314841Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2024-11-21T23:12:43.314919Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3265:3404] 2024-11-21T23:12:43.314997Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3265:3404] 2024-11-21T23:12:43.315756Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:3266:3240] 2024-11-21T23:12:43.316094Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3265:3404], server id = [2:3266:3240], tablet id = 72075186224037897, status = OK 2024-11-21T23:12:43.316307Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:3266:3240], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T23:12:43.316405Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T23:12:43.316690Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:12:43.316782Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3262:3401], StatRequests.size() = 1 2024-11-21T23:12:43.349442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3270:3408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error ... -11-21T23:15:32.857112Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:8849:6343], StatRequests.size() = 1 2024-11-21T23:15:33.242129Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:15:33.242221Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:15:33.242302Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037889, LocalPathId: 3] is data table. 2024-11-21T23:15:33.242361Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T23:15:33.242925Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T23:15:33.263942Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T23:15:33.274587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8874:6362], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.274728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8884:6367], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.274867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.296375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037889 2024-11-21T23:15:33.384205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8888:6370], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2024-11-21T23:15:33.720193Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:9011:6438]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:15:33.720588Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2024-11-21T23:15:33.720954Z node 2 :STATISTICS DEBUG: [72075186224037897] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2024-11-21T23:15:33.721022Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2024-11-21T23:15:33.721199Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:15:33.721293Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:9011:6438], StatRequests.size() = 1 2024-11-21T23:15:33.903045Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWRjOTMyOTgtOTBiMjIyODYtN2E4NGNmM2EtYjYxM2QyOTg=, TxId: 2024-11-21T23:15:33.903155Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWRjOTMyOTgtOTBiMjIyODYtN2E4NGNmM2EtYjYxM2QyOTg=, TxId: 2024-11-21T23:15:33.903873Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T23:15:33.922833Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037889, LocalPathId: 3] 2024-11-21T23:15:33.922922Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T23:15:33.982819Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T23:15:33.982913Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T23:15:34.042014Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3613:3353], schemeshard count = 1 2024-11-21T23:15:34.364375Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2024-11-21T23:15:34.364480Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 213.000000s, at schemeshard: 72075186224037899 2024-11-21T23:15:34.364802Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2024-11-21T23:15:34.380099Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T23:15:34.526293Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9059:6464]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:15:34.526682Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2024-11-21T23:15:34.526737Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9059:6464], StatRequests.size() = 1 2024-11-21T23:15:35.709005Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9108:6496]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:15:35.709306Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2024-11-21T23:15:35.709353Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9108:6496], StatRequests.size() = 1 2024-11-21T23:15:36.163390Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:15:36.174949Z node 2 :STATISTICS DEBUG: [72075186224037897] 
ScheduleNextTraversal 2024-11-21T23:15:36.175020Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:15:36.175064Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2024-11-21T23:15:36.175100Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T23:15:36.175473Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T23:15:36.182865Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T23:15:36.252721Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGRkYjkzNTYtMjBjODdmMjMtNmIwNTk1MjctYmI1ZDQzOGM=, TxId: 2024-11-21T23:15:36.252797Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGRkYjkzNTYtMjBjODdmMjMtNmIwNTk1MjctYmI1ZDQzOGM=, TxId: 2024-11-21T23:15:36.253465Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T23:15:36.280597Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2024-11-21T23:15:36.280663Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T23:15:36.295522Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2024-11-21T23:15:36.295595Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 198.000000s, at schemeshard: 72075186224037905 2024-11-21T23:15:36.295798Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2024-11-21T23:15:36.311418Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T23:15:37.273584Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9190:6550]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:15:37.273930Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2024-11-21T23:15:37.273976Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9190:6550], StatRequests.size() = 1 2024-11-21T23:15:38.834126Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9244:6582]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:15:38.834555Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2024-11-21T23:15:38.834612Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9244:6582], StatRequests.size() = 1 2024-11-21T23:15:39.221831Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 3 2024-11-21T23:15:39.222118Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:15:39.222674Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:15:39.235828Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:15:39.235907Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T23:15:39.237577Z node 2 :STATISTICS DEBUG: [72075186224037897] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 2024-11-21T23:15:39.237647Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2024-11-21T23:15:39.238221Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2024-11-21T23:15:39.241730Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2024-11-21T23:15:39.305482Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODE4ODBhMTMtZGNjOGQ1ODYtZTM0ZTk4NzQtOTcxN2Q5MmM=, TxId: 2024-11-21T23:15:39.305553Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODE4ODBhMTMtZGNjOGQ1ODYtZTM0ZTk4NzQtOTcxN2Q5MmM=, TxId: 2024-11-21T23:15:39.306098Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Execute 2024-11-21T23:15:39.336849Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2024-11-21T23:15:39.336916Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxFinishTraversal::Complete. No ActorId to send reply. 2024-11-21T23:15:40.302159Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9319:6630]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:15:40.302501Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2024-11-21T23:15:40.302549Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9319:6630], StatRequests.size() = 1 2024-11-21T23:15:40.303406Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9321:6632]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:15:40.307309Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2024-11-21T23:15:40.307378Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9321:6632], StatRequests.size() = 1 >> TTopicApiDescribes::DescribeTopic [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |85.4%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2024-11-21T23:15:27.872232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875184420798344:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:27.872300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024aa/r3tmp/tmptHzQJj/pdisk_1.dat 
2024-11-21T23:15:28.125588Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:28.127628Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875184178202549:2266];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.127715Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:28.127941Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:28.593133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:28.593255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:28.595680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:28.595753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:28.605331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:28.606195Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:15:28.608360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20004, node 1 2024-11-21T23:15:28.803901Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:28.803928Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:28.881156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:28.931615Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:28.988201Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0024aa/r3tmp/yandexNNKKh6.tmp 2024-11-21T23:15:28.988226Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0024aa/r3tmp/yandexNNKKh6.tmp 2024-11-21T23:15:28.988381Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0024aa/r3tmp/yandexNNKKh6.tmp 2024-11-21T23:15:28.988491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.154902Z INFO: TTestServer started on Port 15422 GrpcPort 20004 TClient is connected to server localhost:15422 PQClient connected to localhost:20004 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:29.613340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:29.692991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:15:32.673051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875205653039166:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.673959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875205653039193:2288], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.674306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.675293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875205895635902:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.675379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875205895635909:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.683051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.693182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:15:32.827714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T23:15:32.844643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875205653039195:2289], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:32.845071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875205895635916:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:32.882550Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875184420798344:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:32.894503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:32.986811Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875184178202549:2266];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:32.986891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:33.239570Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439875205653039229:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:33.241698Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzdhYTQ4NTMtMzhkY2M0OWItZDRkNTljZWItMTBmNTk1YmU=, ActorId: [2:7439875205653039164:2284], ActorState: ExecuteState, TraceId: 01jd8g4t5f6jafm1rjkjvxng95, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:33.243975Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:33.245249Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875205895636036:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:33.247100Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTZjOWViYTgtYzEwMWJhYmUtN2NhZjA0NDYtZWVhYTcxYjU=, ActorId: [1:7439875205895635899:2304], ActorState: ExecuteState, TraceId: 01jd8g4t5h68e9z8zqyq2kygy6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:33.247532Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:33.253369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.362672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.545008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/ ... 
} ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 7 PartitionIds: 13 TopicName: "rt3.dc1--topic-x" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "topic-x" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--topic-x" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 7 Status: Active CreateVersion: 1 TabletId: 0 } Partitions { PartitionId: 13 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 AllPartitions { PartitionId: 7 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 13 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2024-11-21T23:15:40.472419Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [1:7439875240255375754:2471] 2024-11-21T23:15:40.460713Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7439875240012778246:2396] 2024-11-21T23:15:40.476696Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7439875240255375747:2468] 2024-11-21T23:15:40.478454Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7439875240255375754:2471] 2024-11-21T23:15:40.481499Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7439875240012778248:2398] 2024-11-21T23:15:40.485747Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7439875240255375729:2466] 2024-11-21T23:15:40.486571Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7439875240012778248:2398] 2024-11-21T23:15:40.488448Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7439875240012778251:2400] 2024-11-21T23:15:40.489979Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7439875240255375729:2466] 2024-11-21T23:15:40.490917Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7439875240012778251:2400] 2024-11-21T23:15:40.509138Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7439875240255375751:2470] 2024-11-21T23:15:40.501701Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7439875240012778254:2403] 2024-11-21T23:15:40.503777Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7439875240012778254:2403] 2024-11-21T23:15:40.505436Z node 2 :PERSQUEUE INFO: 
[PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7439875240012778245:2395] 2024-11-21T23:15:40.508474Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 1 [2:7439875240012778245:2395] 2024-11-21T23:15:40.512378Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7439875240255375751:2470] 2024-11-21T23:15:40.527930Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7439875240012778252:2401] 2024-11-21T23:15:40.530074Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7439875240012778252:2401] 2024-11-21T23:15:40.530759Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7439875240255375755:2472] 2024-11-21T23:15:40.533262Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7439875240012778253:2402] 2024-11-21T23:15:40.533510Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7439875240255375755:2472] 2024-11-21T23:15:40.535657Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] bootstrapping 5 [1:7439875240255375750:2469] 2024-11-21T23:15:40.535358Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7439875240012778253:2402] 2024-11-21T23:15:40.537807Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7439875240012778247:2397] 2024-11-21T23:15:40.539784Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7439875240012778247:2397] 2024-11-21T23:15:40.542470Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7439875240255375750:2469] 2024-11-21T23:15:40.544880Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jd8g51dw1tg4y03thab4kpnx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI4ZWJkZTEtNmM0ZWFmZjQtNjg5MzY5ZTItYjE5OThmYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:15:40.547470Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7439875240255375730:2467] 2024-11-21T23:15:40.549480Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7439875240255375730:2467] ===Query complete Create topic result: 1 2024-11-21T23:15:40.629109Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875240255375823:3756]: Request location 2024-11-21T23:15:40.629596Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875240255375832:3759] connected; active server actors: 1 2024-11-21T23:15:40.629947Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2024-11-21T23:15:40.629962Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2024-11-21T23:15:40.629976Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 1 2024-11-21T23:15:40.629993Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2024-11-21T23:15:40.630021Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 1 2024-11-21T23:15:40.630036Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2024-11-21T23:15:40.630048Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 1 2024-11-21T23:15:40.630060Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 1 2024-11-21T23:15:40.630072Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 1 2024-11-21T23:15:40.630083Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 1 2024-11-21T23:15:40.630092Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 1 2024-11-21T23:15:40.630100Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 1 2024-11-21T23:15:40.630108Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 1 2024-11-21T23:15:40.630117Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 1 2024-11-21T23:15:40.630127Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] 
addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 1 2024-11-21T23:15:40.630306Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875240255375823:3756]: Got location 2024-11-21T23:15:40.630963Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875240255375832:3759] disconnected; active server actors: 1 2024-11-21T23:15:40.630993Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875240255375832:3759] disconnected no session 2024-11-21T23:15:40.631362Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875240255375833:3760]: Request location 2024-11-21T23:15:40.631718Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875240255375835:3762] connected; active server actors: 1 2024-11-21T23:15:40.631920Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2024-11-21T23:15:40.631947Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2024-11-21T23:15:40.631962Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2024-11-21T23:15:40.632060Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875240255375833:3760]: Got location 2024-11-21T23:15:40.632430Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875240255375835:3762] disconnected; active server actors: 1 2024-11-21T23:15:40.632450Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875240255375835:3762] disconnected no session 2024-11-21T23:15:40.632700Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875240255375836:3763]: Request location 2024-11-21T23:15:40.633171Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875240255375838:3765] connected; active server actors: 1 >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> KqpMultishardIndex::SecondaryIndexSelectNull >> BsControllerConfig::DeleteStoragePool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2024-11-21T23:15:28.302110Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875186203015243:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.302157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:28.372213Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875188087190245:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.375042Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:28.635472Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:28.671422Z 
node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00242a/r3tmp/tmpLInJJ5/pdisk_1.dat 2024-11-21T23:15:28.923415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:28.923589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:28.924905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:28.924971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:28.928823Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:15:28.929000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:28.929606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:28.950323Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29134, node 1 2024-11-21T23:15:28.974651Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:28.987743Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:29.081534Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/00242a/r3tmp/yandexROs0Qb.tmp 2024-11-21T23:15:29.081554Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/00242a/r3tmp/yandexROs0Qb.tmp 2024-11-21T23:15:29.081682Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/00242a/r3tmp/yandexROs0Qb.tmp 2024-11-21T23:15:29.081759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.120750Z INFO: TTestServer started on Port 17074 GrpcPort 29134 TClient is connected to server localhost:17074 PQClient connected to localhost:29134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:29.415959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:15:29.494577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:15:32.406759Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875205267059798:2288], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.407323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875205267059776:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.407409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.439139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:15:32.498461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875205267059814:2289], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:15:33.003182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.049622Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875203382885472:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:33.045759Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439875205267059849:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:33.046687Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzlhZWUxOTctMWRiYzhhM2UtODNkMWE1MmItZGViOGE0MzA=, ActorId: [2:7439875205267059772:2281], ActorState: ExecuteState, TraceId: 01jd8g4sxkce8g69tbdnfgmh07, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:33.052964Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:33.059697Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWE4YzMwMTMtMzg4MzU0NTctMjJiZGYzYzEtMzllMTc2N2M=, ActorId: [1:7439875203382885395:2302], ActorState: ExecuteState, TraceId: 01jd8g4sxs4gf0emmzt9h2gx22, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:33.060085Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:33.120180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.305329Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875186203015243:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.305435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:33.323580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.372850Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875188087190245:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.372892Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T23:15:33.742646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8g4tyxekjft0pcwaq1pq9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVjZmI4NC0zZmZiM2YxYS05NDk5Mzc2Yi01YWExNWFiNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439875207677853249:3072] === CheckClustersList. Ok 2024-11-21T23:15:43.938730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:43.938767Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:44.136478Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439875254922494360:2478] TxId: 281474976710684. Ctx: { TraceId: 01jd8g55c1e2nd8a97zrehqqdz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGQ5Zjk3MTgtM2FlMTVkNWEtZGUzNTZjZTAtZWJiYzdhMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2024-11-21T23:15:44.170271Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439875254922494364:2478], TxId: 281474976710684, task: 2. Ctx: { TraceId : 01jd8g55c1e2nd8a97zrehqqdz. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OGQ5Zjk3MTgtM2FlMTVkNWEtZGUzNTZjZTAtZWJiYzdhMGI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439875254922494360:2478], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2024-11-21T23:15:28.052392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875188242948031:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.052924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:28.131144Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875186574329872:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.131186Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:28.436129Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:28.436398Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002462/r3tmp/tmpGBrqKn/pdisk_1.dat 2024-11-21T23:15:29.097844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:29.097971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.104110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:29.104189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.129492Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:15:29.130985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:29.143729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:29.142478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:29.143383Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:29.143954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17498, node 1 2024-11-21T23:15:29.469573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002462/r3tmp/yandexu6lldC.tmp 2024-11-21T23:15:29.469597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002462/r3tmp/yandexu6lldC.tmp 2024-11-21T23:15:29.469717Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002462/r3tmp/yandexu6lldC.tmp 2024-11-21T23:15:29.469801Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.603904Z INFO: TTestServer started on Port 8174 GrpcPort 17498 TClient is connected to server localhost:8174 PQClient connected to 
localhost:17498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:30.309646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:30.424266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:15:33.042516Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875188242948031:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.042581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:33.132619Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875186574329872:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.132702Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:33.645169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875209717785474:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.645291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875209717785485:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.646626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.675271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875208049166745:2289], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.675330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875208049166734:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.675428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:33.690698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:15:33.751859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875208049166748:2290], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:33.751565Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T23:15:33.753872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875209717785488:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:34.017900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:34.022517Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875209717785591:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:34.023153Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDYwOGQ4NjItYTlkNDc1YmItODNlNTg1NzItNTY1MTIzNg==, ActorId: [1:7439875209717785471:2305], ActorState: ExecuteState, TraceId: 01jd8g4v2r0y96dh0ybp9yk5e0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:34.025693Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:34.026499Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439875208049166789:2294], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:34.029107Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWIzY2RjOGMtMTdiZTg3ODktYTRkN2ZkM2ItMjQzYzQ2MGQ=, ActorId: [2:7439875208049166732:2285], ActorState: ExecuteState, TraceId: 01jd8g4v4namj69bgdzva349rh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:34.029601Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:34.133760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:34.341025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: ... key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } consumer_stats { min_partitions_last_read_time { seconds: 1732230942 nanos: 54000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } topic_stats { min_last_write_time { seconds: 1732230942 nanos: 388000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2024-11-21T23:15:42.649560Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T23:15:42.649659Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" include_location: true 2024-11-21T23:15:42.649749Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2024-11-21T23:15:42.650528Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875248372493349:2553]: Request location 2024-11-21T23:15:42.651526Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875248372493351:2554] connected; active server actors: 1 2024-11-21T23:15:42.652019Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2024-11-21T23:15:42.652042Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2024-11-21T23:15:42.652055Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2024-11-21T23:15:42.652071Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, 
partitionId 3, NodeId 2, Generation 2 2024-11-21T23:15:42.652083Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2024-11-21T23:15:42.652095Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2024-11-21T23:15:42.652108Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2024-11-21T23:15:42.652123Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2024-11-21T23:15:42.652136Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2024-11-21T23:15:42.652147Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2024-11-21T23:15:42.652159Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2024-11-21T23:15:42.652171Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2024-11-21T23:15:42.652185Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2024-11-21T23:15:42.652196Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2024-11-21T23:15:42.652208Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2024-11-21T23:15:42.652748Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875248372493349:2553]: Got location 2024-11-21T23:15:42.654223Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875248372493351:2554] disconnected; active server actors: 1 2024-11-21T23:15:42.654250Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875248372493351:2554] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1732230940867 tx_id: 281474976710678 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true 
partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe topic with no stats or location 2024-11-21T23:15:42.667728Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T23:15:42.667832Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" 2024-11-21T23:15:42.667921Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1732230940867 tx_id: 281474976710678 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe bad topic 2024-11-21T23:15:42.678218Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2024-11-21T23:15:42.678328Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//bad-topic" include_stats: true 
include_location: true 2024-11-21T23:15:42.678439Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } 2024-11-21T23:15:44.140686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:44.140721Z node 1 :IMPORT WARN: Table profiles were not loaded |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> DataShardReadIterator::ShouldReadRangeReverse [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight |85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD] >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2024-11-21T23:15:28.028948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875187992747783:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.028993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:28.129726Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875186927847655:2121];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.129774Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002495/r3tmp/tmpQcomFP/pdisk_1.dat 2024-11-21T23:15:28.490460Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:28.517028Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:29.022427Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:29.038401Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:29.054606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:29.054722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.095339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:29.095405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.120891Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:15:29.121012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:29.125191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 61667, node 1 2024-11-21T23:15:29.350367Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/002495/r3tmp/yandex4QPb0B.tmp 2024-11-21T23:15:29.350412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/002495/r3tmp/yandex4QPb0B.tmp 2024-11-21T23:15:29.350615Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/002495/r3tmp/yandex4QPb0B.tmp 2024-11-21T23:15:29.350739Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.412274Z INFO: TTestServer started on Port 3410 GrpcPort 61667 TClient is connected to server localhost:3410 PQClient connected to localhost:61667 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:29.771607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:29.854999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:15:32.988287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875205172618041:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.988433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.990253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875205172618057:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.994326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:15:33.052636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875187992747783:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.053144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:33.066094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875205172618059:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:33.133183Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875186927847655:2121];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.133219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:33.479495Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439875208402684471:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:33.481795Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGUyMmRiM2ItMmNhOTkwOGMtOGFlNDQwZjUtNGQ0ZTMzYmU=, ActorId: [2:7439875208402684433:2285], ActorState: ExecuteState, TraceId: 01jd8g4tn78avevbgdh19mgbj8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:33.490863Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:33.501942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.505053Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875209467585459:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:33.507737Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTM5ZmFjNTctNzA2YjNmYWYtMTVmZjdkM2YtYTc2NzU3NmI=, ActorId: [1:7439875205172618039:2304], ActorState: ExecuteState, TraceId: 01jd8g4ter81ts9s5mqr4nqx36, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:33.508157Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:33.630253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.794730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T23:15:34.367397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd8g4vfp99ba451fpzshdtxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTJiMzQ5ZjItZDg2ZjFjMGUtOTVjZjI3ZDktNjU0ZjVhYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439875213762553193:3081] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2024-11-21T23:15:40.470509Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2024-11-21T23:15:41.255869Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:15:41.255936Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893] doesn't have tx writes info 2024-11-21T23:15:41.256203Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMet ... 
075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:15:43.465211Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037896] doesn't have tx writes info 2024-11-21T23:15:43.465894Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [2:7439875251352358541:2453] 2024-11-21T23:15:43.489089Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7439875251352358535:2449] 2024-11-21T23:15:43.507237Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:15:43.507279Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx writes info 2024-11-21T23:15:43.524138Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:15:43.524176Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893] doesn't have tx writes info 2024-11-21T23:15:43.542807Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7439875252417260258:2510] 2024-11-21T23:15:43.545608Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7439875252417260259:2511] 2024-11-21T23:15:43.574595Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7439875251352358536:2450] 2024-11-21T23:15:43.579899Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:15:43.588819Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2024-11-21T23:15:43.588855Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894] doesn't have tx writes info 2024-11-21T23:15:43.593281Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] bootstrapping 4 [2:7439875251352358538:2451] 2024-11-21T23:15:43.611251Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] bootstrapping 5 [2:7439875251352358540:2452] 2024-11-21T23:15:43.579938Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895] doesn't have tx writes info 2024-11-21T23:15:43.580344Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7439875252417260264:2512] 2024-11-21T23:15:43.596428Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7439875252417260265:2513] 2024-11-21T23:15:43.601871Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [1:7439875252417260288:2516] 2024-11-21T23:15:43.620089Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7439875251352358573:2458] 2024-11-21T23:15:43.622486Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7439875251352358574:2459] 2024-11-21T23:15:43.635856Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7439875251352358535:2449] 2024-11-21T23:15:43.648762Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [2:7439875251352358541:2453] 2024-11-21T23:15:43.625239Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, 
Partition: 2, State: StateInit] bootstrapping 2 [1:7439875252417260289:2517] 2024-11-21T23:15:43.661347Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7439875251352358500:2445] 2024-11-21T23:15:43.662732Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [1:7439875252417260289:2517] 2024-11-21T23:15:43.669342Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7439875251352358536:2450] 2024-11-21T23:15:43.690971Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [1:7439875252417260288:2516] 2024-11-21T23:15:43.673291Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7439875251352358501:2446] 2024-11-21T23:15:43.674784Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [2:7439875251352358540:2452] 2024-11-21T23:15:43.676043Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 2 [2:7439875251352358538:2451] 2024-11-21T23:15:43.677680Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7439875251352358573:2458] 2024-11-21T23:15:43.690679Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7439875251352358574:2459] 2024-11-21T23:15:43.693674Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7439875252417260258:2510] 2024-11-21T23:15:43.697417Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2024-11-21T23:15:43.697438Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 2, Generation 2 2024-11-21T23:15:43.697454Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 2, Generation 2 2024-11-21T23:15:43.697470Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2024-11-21T23:15:43.697535Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2024-11-21T23:15:43.697548Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 1, Generation 2 2024-11-21T23:15:43.707735Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7439875252417260265:2513] 2024-11-21T23:15:43.709716Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 
generation 2 [1:7439875252417260264:2512] 2024-11-21T23:15:43.713740Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7439875252417260259:2511] 2024-11-21T23:15:43.814120Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2024-11-21T23:15:43.814153Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2024-11-21T23:15:43.870551Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2024-11-21T23:15:43.870674Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2024-11-21T23:15:43.870713Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7439875252417260433:2531]: Bootstrap 2024-11-21T23:15:43.871441Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875252417260433:2531]: Request location 2024-11-21T23:15:43.871892Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875252417260435:2532] connected; active server actors: 1 2024-11-21T23:15:43.872056Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2024-11-21T23:15:43.872092Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875252417260433:2531]: Got location 2024-11-21T23:15:43.872870Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875252417260435:2532] disconnected; active server actors: 1 2024-11-21T23:15:43.872895Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875252417260435:2532] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2024-11-21T23:15:43.876566Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2024-11-21T23:15:43.876661Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2024-11-21T23:15:43.877135Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7439875252417260438:2533]: Bootstrap 2024-11-21T23:15:43.877768Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875252417260438:2533]: Request location 2024-11-21T23:15:43.878180Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875252417260441:2535] connected; active server actors: 1 2024-11-21T23:15:43.878627Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2024-11-21T23:15:43.878661Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7439875252417260438:2533]: Got location 2024-11-21T23:15:43.878946Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875252417260441:2535] disconnected; active server actors: 1 2024-11-21T23:15:43.878965Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7439875252417260441:2535] disconnected no session Got response: operation { ready: true status: SUCCESS result { 
[type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1732230943 nanos: 335000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2024-11-21T23:15:43.884234Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2024-11-21T23:15:43.884315Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2024-11-21T23:15:43.884348Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7439875252417260443:2536]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } 2024-11-21T23:15:44.002506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:44.002545Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:204:2066] recipient: [1:183:2075] Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:207:2066] recipient: [1:183:2075] 2024-11-21T23:15:27.845987Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:27.850614Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:27.853836Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:27.854316Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:27.855092Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:27.855140Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:27.855369Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:27.865063Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:27.865222Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:27.865381Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:27.865483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:27.865582Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:27.865689Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:206:2077] sender: [1:229:2066] recipient: [1:20:2067] 2024-11-21T23:15:27.877092Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:27.877269Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:27.888148Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:27.888282Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:27.888361Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:27.888435Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:27.888539Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:27.888638Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:27.888686Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:27.888740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:27.900475Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:27.900641Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:27.901864Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:27.901914Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:27.902036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:27.915393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:204:2066] recipient: [11:185:2075] Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:207:2066] recipient: [11:185:2075] 2024-11-21T23:15:29.981685Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:29.983790Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:29.985333Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:29.985799Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 
268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:29.986359Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:29.986384Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:29.986589Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:29.996246Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:29.996391Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:29.996524Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:29.996641Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:29.996748Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:29.996820Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:206:2077] sender: [11:229:2066] recipient: [11:20:2067] 2024-11-21T23:15:30.010508Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:30.010725Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:30.021609Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:30.021799Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:30.021890Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:30.021983Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:30.022156Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:30.022247Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:30.022297Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:30.022355Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:30.034361Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:30.034581Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:30.035880Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:30.035944Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:30.036085Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:30.036473Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:2964:2106] recipient: [21:2865:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:2964:2106] recipient: [21:2865:2115] Leader for TabletID 72057594037932033 is [21:2966:2117] sender: [21:2967:2106] recipient: [21:2865:2115] 2024-11-21T23:15:32.605223Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:32.605909Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:32.607440Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:32.607890Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:32.608747Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:32.608779Z node 21 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:32.609023Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:32.618179Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:32.618291Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:32.618376Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:32.618640Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:32.618769Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx ... 
pp:355} Create new pdisk PDiskId# 87:1002 Path# /dev/disk1 2024-11-21T23:15:40.648452Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1000 Path# /dev/disk1 2024-11-21T23:15:40.648477Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1000 Path# /dev/disk3 2024-11-21T23:15:40.648507Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2024-11-21T23:15:40.648534Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1000 Path# /dev/disk1 2024-11-21T23:15:40.648560Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2024-11-21T23:15:40.648586Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1000 Path# /dev/disk1 2024-11-21T23:15:40.648612Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2024-11-21T23:15:40.648640Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1000 Path# /dev/disk1 2024-11-21T23:15:40.648667Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1001 Path# /dev/disk2 2024-11-21T23:15:40.648691Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1000 Path# /dev/disk3 2024-11-21T23:15:40.648716Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2024-11-21T23:15:40.648742Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1001 Path# /dev/disk1 2024-11-21T23:15:40.648771Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1000 Path# /dev/disk3 2024-11-21T23:15:40.648798Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2024-11-21T23:15:40.648825Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1001 Path# /dev/disk1 2024-11-21T23:15:40.648852Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 80:1002 Path# /dev/disk2 2024-11-21T23:15:40.648877Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 90:1001 Path# /dev/disk2 2024-11-21T23:15:40.648901Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1000 Path# /dev/disk3 2024-11-21T23:15:40.648926Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2024-11-21T23:15:40.648951Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1001 Path# /dev/disk1 2024-11-21T23:15:40.648978Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 82:1002 Path# /dev/disk3 2024-11-21T23:15:40.649004Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 88:1001 Path# /dev/disk3 2024-11-21T23:15:40.649030Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1001 Path# /dev/disk2 2024-11-21T23:15:40.649056Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 114:1002 
Path# /dev/disk3 2024-11-21T23:15:40.649081Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 79:1002 Path# /dev/disk1 2024-11-21T23:15:40.649106Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 83:1002 Path# /dev/disk2 2024-11-21T23:15:40.649132Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1001 Path# /dev/disk2 2024-11-21T23:15:40.649157Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1000 Path# /dev/disk3 2024-11-21T23:15:40.649183Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 85:1002 Path# /dev/disk3 2024-11-21T23:15:40.649210Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 91:1001 Path# /dev/disk3 2024-11-21T23:15:40.649236Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1001 Path# /dev/disk2 2024-11-21T23:15:40.649260Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1001 Path# /dev/disk2 2024-11-21T23:15:40.649286Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1000 Path# /dev/disk1 2024-11-21T23:15:40.649310Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 88:1002 Path# /dev/disk2 2024-11-21T23:15:40.649336Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1000 Path# /dev/disk3 2024-11-21T23:15:40.649363Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 86:1002 Path# /dev/disk3 2024-11-21T23:15:40.649389Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1001 Path# /dev/disk2 2024-11-21T23:15:40.649413Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 81:1002 Path# /dev/disk2 2024-11-21T23:15:40.649441Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 91:1002 Path# /dev/disk2 2024-11-21T23:15:40.649467Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1000 Path# /dev/disk3 2024-11-21T23:15:40.649496Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 89:1001 Path# /dev/disk3 2024-11-21T23:15:40.649520Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1001 Path# /dev/disk2 2024-11-21T23:15:40.649547Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1001 Path# /dev/disk2 2024-11-21T23:15:40.649573Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 94:1002 Path# /dev/disk2 2024-11-21T23:15:40.649598Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1000 Path# /dev/disk3 2024-11-21T23:15:40.649624Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 92:1001 Path# /dev/disk3 2024-11-21T23:15:40.649651Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1001 Path# /dev/disk2 2024-11-21T23:15:40.649677Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 89:1002 Path# /dev/disk2 2024-11-21T23:15:40.649702Z node 
71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1000 Path# /dev/disk3 2024-11-21T23:15:40.649727Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 95:1001 Path# /dev/disk2 2024-11-21T23:15:40.649752Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1000 Path# /dev/disk3 2024-11-21T23:15:40.649777Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1001 Path# /dev/disk1 2024-11-21T23:15:40.649804Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2024-11-21T23:15:40.649830Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 77:1002 Path# /dev/disk1 2024-11-21T23:15:40.649856Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 84:1002 Path# /dev/disk3 2024-11-21T23:15:40.649880Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 90:1002 Path# /dev/disk3 2024-11-21T23:15:40.649904Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1000 Path# /dev/disk2 2024-11-21T23:15:40.649929Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2024-11-21T23:15:40.649955Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 75:1002 Path# /dev/disk1 2024-11-21T23:15:40.649981Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 92:1002 Path# /dev/disk2 2024-11-21T23:15:40.650009Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1001 Path# /dev/disk3 2024-11-21T23:15:40.650037Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1001 Path# /dev/disk3 2024-11-21T23:15:40.650065Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2024-11-21T23:15:40.650090Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 73:1002 Path# /dev/disk1 2024-11-21T23:15:40.650115Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2024-11-21T23:15:40.650139Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 76:1002 Path# /dev/disk1 2024-11-21T23:15:40.650164Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 93:1002 Path# /dev/disk3 2024-11-21T23:15:40.650189Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1001 Path# /dev/disk2 2024-11-21T23:15:40.650214Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 71:1002 Path# /dev/disk2 2024-11-21T23:15:40.650239Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2024-11-21T23:15:40.650263Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 72:1002 Path# /dev/disk1 2024-11-21T23:15:40.650288Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2024-11-21T23:15:40.650313Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 78:1002 Path# /dev/disk1 2024-11-21T23:15:40.650337Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 95:1002 Path# /dev/disk3 2024-11-21T23:15:40.650362Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 74:1002 Path# /dev/disk2 2024-11-21T23:15:40.666610Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T23:15:40.767998Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2024-11-21T23:15:40.829499Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } >> KqpMultishardIndex::DataColumnUpsertMixedSemantic-StreamLookup [GOOD] >> KqpMultishardIndex::DataColumnWrite+StreamLookup >> KqpUniqueIndex::ReplaceFkAlreadyExist >> KqpUniqueIndex::InsertFkPartialColumnSet |85.4%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SystemView::QueryStatsRetries [GOOD] >> SystemView::StoragePoolsFields >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> SystemView::TabletsFields [GOOD] |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> SystemView::TabletsFollowers >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex |85.4%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.4%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> BindQueue::Basic >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsTables >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit >> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::SelectingColumns |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] >> DataShardReadIteratorSysTables::ShouldNotAllowArrow |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |85.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> SystemView::PDisksFields [GOOD] >> SystemView::GroupsFields >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::QueryStats >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] Test command err: 2024-11-21T23:15:52.132667Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:15:52.138582Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:15:52.140049Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:15:52.144335Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:15:52.144503Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# 
[2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:15:52.145356Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fed/r3tmp/tmpjE0Ek7/pdisk_1.dat 2024-11-21T23:15:52.951274Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:535:2457] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1293:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:15:52.951436Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1293:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:52.951492Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1293:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:52.951519Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1293:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:52.951566Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1293:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:52.951590Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1293:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:52.951615Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1293:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:52.951655Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:1293:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:15:52.951727Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1293:1] Marker# BPG33 2024-11-21T23:15:52.951769Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1293:1] Marker# BPG32 2024-11-21T23:15:52.951812Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1293:2] Marker# BPG33 2024-11-21T23:15:52.951847Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1293:2] Marker# BPG32 2024-11-21T23:15:52.951875Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1293:3] Marker# BPG33 2024-11-21T23:15:52.951898Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1293:3] Marker# BPG32 2024-11-21T23:15:52.952069Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:62:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1293:3] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:15:52.952127Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:55:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1293:2] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:15:52.952168Z node 
1 :BS_PROXY DEBUG: Send to queueActorId# [1:76:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1293:1] FDS# 1293 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:15:52.955018Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1293:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:15:52.955276Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1293:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:15:52.955363Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1293:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90181 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:15:52.955439Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1293:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:15:52.955511Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1293:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:15:53.100547Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] bootstrap ActorId# [1:579:2494] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:222:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:15:53.100714Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:53.100763Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:53.100793Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:53.100821Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:53.100849Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:53.100879Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:53.100918Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] restore Id# [72057594037932033:2:8:0:0:222:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:15:53.100994Z node 1 :BS_PROXY_PUT DEBUG: 
[c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG33 2024-11-21T23:15:53.101123Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG32 2024-11-21T23:15:53.101168Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG33 2024-11-21T23:15:53.101199Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG32 2024-11-21T23:15:53.101240Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG33 2024-11-21T23:15:53.101268Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG32 2024-11-21T23:15:53.101430Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:62:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:3] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:15:53.101498Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:55:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:2] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:15:53.101547Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:76:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:1] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:15:53.115002Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:15:53.115368Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:15:53.115510Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:15:53.115600Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:15:53.115701Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:15:53.116957Z 
node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:15:53.117003Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T23:15:53.117106Z nod ... 5:53.171263Z node 1 :BS_PROXY_PUT NOTICE: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 Sending TEvPut 2024-11-21T23:15:53.171716Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] bootstrap ActorId# [1:595:2507] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T23:15:53.171827Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:53.171880Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:15:53.171938Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2024-11-21T23:15:53.171983Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2024-11-21T23:15:53.172099Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:15:53.177107Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T23:15:53.177249Z node 1 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2024-11-21T23:15:53.177321Z node 1 :BS_PROXY_PUT INFO: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:15:53.177952Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:15:53.178004Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T23:15:53.178108Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2024-11-21T23:15:53.178855Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/dl5p/001fed/r3tmp/tmpjE0Ek7//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2024-11-21T23:15:53.179943Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 
2024-11-21T23:15:53.179994Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:15:53.182052Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:599:2104] targetNodeId# 1 Marker# DSP01 2024-11-21T23:15:53.182197Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:600:2105] targetNodeId# 1 Marker# DSP01 2024-11-21T23:15:53.182324Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:601:2106] targetNodeId# 1 Marker# DSP01 2024-11-21T23:15:53.182588Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:602:2107] targetNodeId# 1 Marker# DSP01 2024-11-21T23:15:53.182728Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:603:2108] targetNodeId# 1 Marker# DSP01 2024-11-21T23:15:53.182862Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:604:2109] targetNodeId# 1 Marker# DSP01 2024-11-21T23:15:53.182986Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:597:2103] Create Queue# [2:605:2110] targetNodeId# 1 Marker# DSP01 2024-11-21T23:15:53.183029Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:15:53.184560Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:15:53.184771Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:15:53.184928Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:15:53.185112Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:15:53.185179Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:15:53.185236Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 
0.000000s Marker# DSP04 2024-11-21T23:15:53.185524Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:15:53.185557Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T23:15:53.185591Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T23:15:53.185743Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [2:608:2111] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T23:15:53.185800Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T23:15:53.185995Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:599:2104] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 17563175221683821571 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T23:15:53.187888Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T23:15:53.187957Z node 2 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2024-11-21T23:15:53.188303Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T23:15:53.188520Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T23:15:53.188855Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:609:2508] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T23:15:53.189003Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:15:53.189057Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:15:53.189118Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2024-11-21T23:15:53.189166Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2024-11-21T23:15:53.189312Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:15:53.189522Z node 1 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2024-11-21T23:15:53.189813Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T23:15:53.189915Z node 1 :BS_PROXY_PUT ERROR: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2024-11-21T23:15:53.189979Z node 1 :BS_PROXY_PUT NOTICE: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:15:53.190424Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:599:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD] >> DataShardReadIterator::ShouldStopWhenNodeDisconnected >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD] >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn >> SystemView::TabletsFollowers [GOOD] >> SystemView::TabletsRanges >> SystemView::ConcurrentScans [GOOD] >> SystemView::Describe >> 
KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] >> SystemView::StoragePoolsFields [GOOD] >> SystemView::StoragePoolsRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 2858, MsgBus: 23301 2024-11-21T23:15:38.610966Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875229558472272:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:38.611498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001438/r3tmp/tmphpgjgB/pdisk_1.dat 2024-11-21T23:15:39.457137Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:39.526095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:39.534668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:39.541769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2858, node 1 2024-11-21T23:15:39.807122Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:39.807153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:39.807163Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:39.807265Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23301 TClient is connected to server localhost:23301 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:40.463098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.536406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
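The block above is the beginning of the KqpIndexes::CreateTableWithExplicitSyncIndexSQL run (reported [GOOD]). For readers unfamiliar with what that test exercises, a minimal YQL sketch of a table with an explicitly synchronous secondary index is shown below; the table, column, and index names are illustrative and are not taken from the test itself. The raw test output then continues below.

```sql
-- Illustrative only: DDL of the kind exercised by
-- KqpIndexes::CreateTableWithExplicitSyncIndexSQL.
-- Table/column/index names here are hypothetical.
CREATE TABLE `/Root/TestTable` (
    Key Uint64,
    Fk Uint64,
    Value Utf8,
    -- GLOBAL SYNC declares the secondary index as explicitly synchronous,
    -- i.e. updated in the same transaction as the main table write.
    INDEX ix_fk GLOBAL SYNC ON (Fk),
    PRIMARY KEY (Key)
);
```

The GLOBAL SYNC clause is what the "ExplicitSyncIndex" family of tests targets; note also that the repeated "Resource pool default not found" warnings further down come from workload-service startup in this log, and the run is still reported [GOOD] above.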
2024-11-21T23:15:40.674342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.881347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.983759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:43.588031Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875229558472272:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:43.588114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:43.897160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875251033310317:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:43.897295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:45.206343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.301522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.343022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.400733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.481710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.551104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.687838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875259623245422:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:45.687931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:45.688320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875259623245428:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:45.692520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:45.736765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875259623245430:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:15:47.241140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:15:48.508350Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 21702, MsgBus: 16558 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001438/r3tmp/tmpjWT1JT/pdisk_1.dat 2024-11-21T23:15:49.662606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:49.677318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:49.677411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:49.679528Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:49.694225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21702, node 2 2024-11-21T23:15:49.828406Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:49.828435Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:49.828443Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:49.828554Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16558 TClient is connected to server localhost:16558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:50.222092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:50.228046Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:15:50.243652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:50.342986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:50.547073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:50.627082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:53.165167Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875296429894109:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.165259Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.203917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.241184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.288701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.330845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.410742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.491582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.571988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875296429894612:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.572145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.572472Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875296429894618:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.582088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:53.606556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875296429894620:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:15:54.877442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.979563Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 3280, MsgBus: 32170 2024-11-21T23:15:39.307692Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875235691950772:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:39.307986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00142d/r3tmp/tmpMN2sbq/pdisk_1.dat 2024-11-21T23:15:40.277518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:40.366987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:40.367078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:40.377446Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:40.419707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3280, node 1 2024-11-21T23:15:40.659040Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:40.659060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:40.659067Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:40.659152Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32170 TClient is connected to server localhost:32170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:15:42.397871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:42.453327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:15:42.469004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:42.691570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:43.329232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:43.581071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:44.282712Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875235691950772:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:44.282773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:46.880245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875265756723413:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:46.880351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:47.248240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:47.325539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:47.358939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:47.397278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:47.428708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:47.472560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:47.568461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875270051691221:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:47.568571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:47.568873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875270051691226:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:47.575898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:47.593195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875270051691228:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:15:49.010750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:55.371316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:55.371484Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:55.934237Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g5fqb6bft1e57f3q5acpq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjY4ZTIzY2ItYzg2OGI2N2UtZTdlOTdmYS01OGQ0NjBlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:15:55.948815Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjY4ZTIzY2ItYzg2OGI2N2UtZTdlOTdmYS01OGQ0NjBlMQ==, ActorId: [1:7439875278641626895:2528], ActorState: ExecuteState, TraceId: 01jd8g5fqb6bft1e57f3q5acpq, Create QueryResponse for error on request, msg: >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] Test command err: Delete nodeId# 53 Add nodeId# 101 Pick Delete nodeId# 89 Add nodeId# 102 Disable nodeId# 92 Pick Enable nodeId# 92 Delete nodeId# 97 Delete nodeId# 84 Disable nodeId# 74 Pick Pick Add nodeId# 103 Delete nodeId# 80 Delete nodeId# 79 Disable nodeId# 2 Disable nodeId# 34 Pick Add nodeId# 104 Enable nodeId# 2 Enable nodeId# 34 Enable nodeId# 74 Disable nodeId# 90 Delete nodeId# 96 Delete nodeId# 85 Enable nodeId# 90 Delete nodeId# 29 Pick Delete nodeId# 17 Pick Add nodeId# 105 Add nodeId# 106 Disable nodeId# 86 Add nodeId# 107 Disable nodeId# 61 Disable nodeId# 75 Delete nodeId# 91 Delete nodeId# 101 Pick Delete nodeId# 98 Add nodeId# 108 Add nodeId# 109 Add nodeId# 110 Delete nodeId# 20 Add nodeId# 111 Disable nodeId# 83 Add nodeId# 112 Disable nodeId# 45 Add nodeId# 113 Enable nodeId# 75 Enable nodeId# 45 Pick Add nodeId# 114 Pick Enable nodeId# 61 Enable nodeId# 86 Disable nodeId# 5 Pick Disable nodeId# 36 Delete nodeId# 33 Pick Pick Enable nodeId# 83 Delete nodeId# 30 Delete nodeId# 75 Add nodeId# 115 Add nodeId# 116 Pick Add nodeId# 117 Pick Delete nodeId# 59 Disable nodeId# 27 Add nodeId# 118 Disable nodeId# 15 Pick Disable nodeId# 114 Enable nodeId# 36 Disable nodeId# 58 Delete nodeId# 109 Enable nodeId# 58 Disable nodeId# 26 Delete nodeId# 26 Pick Disable nodeId# 58 Enable nodeId# 15 Add nodeId# 119 Disable nodeId# 9 Disable nodeId# 81 Delete nodeId# 13 Disable nodeId# 112 Add nodeId# 120 Disable nodeId# 48 Pick Enable nodeId# 27 Enable nodeId# 114 Delete nodeId# 99 Delete nodeId# 115 Disable nodeId# 36 Add nodeId# 121 Delete nodeId# 86 Add nodeId# 122 Disable nodeId# 15 Pick Delete nodeId# 52 Delete nodeId# 41 Delete nodeId# 10 Add nodeId# 123 Enable nodeId# 15 Add nodeId# 124 Enable nodeId# 81 Pick Add nodeId# 125 Pick Delete nodeId# 102 Disable nodeId# 51 Disable nodeId# 76 Pick Enable nodeId# 58 Pick Delete nodeId# 125 Disable nodeId# 116 Pick Add nodeId# 126 Delete nodeId# 110 Delete nodeId# 7 Add nodeId# 127 Pick Disable nodeId# 122 Pick Pick Enable nodeId# 76 Enable nodeId# 122 Add nodeId# 128 Pick Add nodeId# 129 Enable nodeId# 5 Pick Add nodeId# 130 Disable nodeId# 40 Delete nodeId# 114 Add nodeId# 131 Enable nodeId# 9 Enable 
nodeId# 40 Pick Pick Add nodeId# 132 Delete nodeId# 112 Disable nodeId# 111 Delete nodeId# 40 Pick Add nodeId# 133 Disable nodeId# 14 Enable nodeId# 116 Add nodeId# 134 Delete nodeId# 129 Pick Pick Add nodeId# 135 Enable nodeId# 36 Disable nodeId# 117 Enable nodeId# 111 Enable nodeId# 14 Pick Enable nodeId# 117 Disable nodeId# 32 Delete nodeId# 43 Enable nodeId# 48 Disable nodeId# 68 Pick Disable nodeId# 87 Delete nodeId# 55 Delete nodeId# 32 Enable nodeId# 51 Add nodeId# 136 Enable nodeId# 87 Delete nodeId# 113 Add nodeId# 137 Enable nodeId# 68 Disable nodeId# 132 Pick Delete nodeId# 60 Enable nodeId# 132 Add nodeId# 138 Delete nodeId# 22 Disable nodeId# 92 Add nodeId# 139 Disable nodeId# 116 Disable nodeId# 58 Pick Pick Pick Enable nodeId# 58 Enable nodeId# 116 Pick Pick Pick Pick Add nodeId# 140 Pick Pick Add nodeId# 141 Enable nodeId# 92 Delete nodeId# 134 Add nodeId# 142 Delete nodeId# 139 Delete nodeId# 119 Disable nodeId# 88 Disable nodeId# 48 Disable nodeId# 34 Disable nodeId# 3 Disable nodeId# 70 Delete nodeId# 116 Enable nodeId# 88 Delete nodeId# 82 Enable nodeId# 48 Disable nodeId# 21 Disable nodeId# 31 Add nodeId# 143 Disable nodeId# 128 Disable nodeId# 76 Pick Pick Disable nodeId# 11 Delete nodeId# 16 Add nodeId# 144 Pick Disable nodeId# 123 Pick Delete nodeId# 47 Pick Delete nodeId# 2 Delete nodeId# 18 Disable nodeId# 68 Delete nodeId# 21 Pick Disable nodeId# 48 Add nodeId# 145 Enable nodeId# 123 Pick Add nodeId# 146 Pick Enable nodeId# 68 Enable nodeId# 128 Add nodeId# 147 Disable nodeId# 118 Add nodeId# 148 Add nodeId# 149 Pick Delete nodeId# 34 Enable nodeId# 118 Pick Enable nodeId# 31 Pick Disable nodeId# 24 Enable nodeId# 48 Delete nodeId# 108 Enable nodeId# 3 Enable nodeId# 24 Enable nodeId# 70 Disable nodeId# 35 Delete nodeId# 146 Add nodeId# 150 Add nodeId# 151 Pick Enable nodeId# 11 Delete nodeId# 9 Add nodeId# 152 Enable nodeId# 76 Add nodeId# 153 Pick Pick Pick Enable nodeId# 35 Pick Add nodeId# 154 Delete nodeId# 46 Delete nodeId# 54 Disable nodeId# 150 Add nodeId# 155 Add nodeId# 156 Delete nodeId# 137 Add nodeId# 157 Disable nodeId# 63 Add nodeId# 158 Disable nodeId# 27 Delete nodeId# 132 Delete nodeId# 6 Add nodeId# 159 Enable nodeId# 27 Delete nodeId# 159 Disable nodeId# 149 Pick Enable nodeId# 63 Disable nodeId# 27 Add nodeId# 160 Add nodeId# 161 Disable nodeId# 90 Enable nodeId# 90 Delete nodeId# 157 Disable nodeId# 138 Pick Delete nodeId# 143 Add nodeId# 162 Disable nodeId# 37 Disable nodeId# 15 Delete nodeId# 106 Pick Add nodeId# 163 Add nodeId# 164 Enable nodeId# 150 Enable nodeId# 37 Disable nodeId# 154 Delete nodeId# 152 Add nodeId# 165 Pick Delete nodeId# 130 Pick Delete nodeId# 71 Add nodeId# 166 Pick Add nodeId# 167 Enable nodeId# 27 Pick Delete nodeId# 50 Delete nodeId# 62 Delete nodeId# 31 Enable nodeId# 138 Enable nodeId# 15 Pick Disable nodeId# 15 Disable nodeId# 87 Enable nodeId# 87 Enable nodeId# 15 Pick Disable nodeId# 19 Enable nodeId# 154 Disable nodeId# 73 Add nodeId# 168 Delete nodeId# 15 Enable nodeId# 73 Pick Add nodeId# 169 Enable nodeId# 19 Delete nodeId# 87 Enable nodeId# 149 Add nodeId# 170 Disable nodeId# 92 Add nodeId# 171 Add nodeId# 172 Enable nodeId# 92 Delete nodeId# 103 Disable nodeId# 131 Add nodeId# 173 Disable nodeId# 48 Disable nodeId# 49 Disable nodeId# 104 Disable nodeId# 145 Pick Enable nodeId# 131 Delete nodeId# 14 Enable nodeId# 49 Pick Add nodeId# 174 Pick Pick Disable nodeId# 4 Pick Delete nodeId# 93 Disable nodeId# 73 Enable nodeId# 4 Enable nodeId# 73 Delete nodeId# 76 Add nodeId# 175 Enable nodeId# 145 Add 
nodeId# 176 Pick Disable nodeId# 12 Disable nodeId# 168 Enable nodeId# 168 Enable nodeId# 104 Disable nodeId# 4 Disable nodeId# 61 Disable nodeId# 145 Disable nodeId# 127 Enable nodeId# 127 Delete nodeId# 94 Pick Enable nodeId# 4 Enable nodeId# 48 Enable nodeId# 61 Disable nodeId# 48 Delete nodeId# 90 Enable nodeId# 145 Disable nodeId# 8 Pick Delete nodeId# 142 Delete nodeId# 140 Enable nodeId# 8 Enable nodeId# 48 Pick Delete nodeId# 124 Disable nodeId# 126 Enable nodeId# 126 Add nodeId# 177 Pick Enable nodeId# 12 Delete nodeId# 11 Delete nodeId# 128 Pick Pick Disable nodeId# 74 Delete nodeId# 74 Pick Delete nodeId# 69 Disable nodeId# 177 Pick Add nodeId# 178 Enable nodeId# 177 Add nodeId# 179 Pick Disable nodeId# 92 Pick Add nodeId# 180 Disable nodeId# 156 Enable nodeId# 92 Enable nodeId# 156 Delete nodeId# 145 Delete nodeId# 141 Pick Add nodeId# 181 Add nodeId# 182 Disable nodeId# 160 Pick Enable nodeId# 160 Pick Delete nodeId# 49 Add nodeId# 183 Disable nodeId# 42 Pick Delete nodeId# 35 Pick Delete nodeId# 24 Enable nodeId# 42 Add nodeId# 184 Add nodeId# 185 Pick Add nodeId# 186 Pick Pick Add nodeId# 187 Pick Disable nodeId# 3 Add nodeId# 188 Disable nodeId# 149 Delete nodeId# 105 Disable nodeId# 77 Disable nodeId# 182 Add nodeId# 189 Delete nodeId# 39 Pick Disable nodeId# 56 Pick Delete nodeId# 38 Enable nodeId# 3 Enable nodeId# 149 Enable nodeId# 77 Enable nodeId# 182 Delete nodeId# 144 Enable nodeId# 56 Add nodeId# 190 Delete nodeId# 126 Disable nodeId# 45 Disable nodeId# 107 Disable nodeId# 136 Delete nodeId# 28 Pick Pick Disable nodeId# 111 Add nodeId# 191 Delete nodeId# 163 Add nodeId# 192 Pick Add nodeId# 193 Delete nodeId# 23 Pick Delete nodeId# 169 Disable nodeId# 180 Delete nodeId# 153 Add nodeId# 194 Pick Enable nodeId# 45 Add nodeId# 195 Pick Disable nodeId# 183 Delete nodeId# 122 Add nodeId# 196 Enable nodeId# 107 Pick Disable nodeId# 12 Enable nodeId# 136 Add nodeId# 197 Enable nodeId# 183 Disable nodeId# 107 Delete nodeId# 3 Delete nodeId# 171 Pick Delete nodeId# 164 Disable nodeId# 170 Add nodeId# 198 Disable nodeId# 66 Enable nodeId# 180 Disable nodeId# 174 Enable nodeId# 174 Add nodeId# 199 Add nodeId# 200 Pick Disable nodeId# 37 Delete nodeId# 195 Disable nodeId# 63 Enable nodeId# 63 Pick Delete nodeId# 150 Disable nodeId# 197 Delete nodeId# 68 Delete nodeId# 73 Disable nodeId# 77 Enable nodeId# 37 Delete nodeId# 168 Pick Pick Add nodeId# 201 Pick Enable nodeId# 66 Add nodeId# 202 Add nodeId# 203 Delete nodeId# 147 Disable nodeId# 48 Disable nodeId# 177 Add nodeId# 204 Pick Add nodeId# 205 Add nodeId# 206 Add nodeId# 207 Delete nodeId# 118 Delete nodeId# 5 Enable nodeId# 111 Enable nodeId# 170 Pick Delete nodeId# 199 Delete nodeId# 83 Pick Disable nodeId# 81 Pick Delete nodeId# 12 Add nodeId# 208 Enable nodeId# 197 Pick Disable nodeId# 64 Add nodeId# 209 Disable nodeId# 78 Add nodeId# 210 Disable nodeId# 207 Pick Disable nodeId# 58 Add nodeId# 211 Disable nodeId# 198 Enable nodeId# 78 Disable nodeId# 42 Pick Disable nodeId# 154 Delete nodeId# 72 Delete nodeId# 191 Delete nodeId# 149 Add nodeId# 212 Enable nodeId# 81 Add nodeId# 213 Enable nodeId# 42 Add nodeId# 214 Add nodeId# 215 Add nodeId# 216 Add nodeId# 217 Disable nodeId# 95 Disable nodeId# 186 Disable nodeId# 25 Disable nodeId# 213 Pick Disable nodeId# 56 Pick Enable nodeId# 77 Delete nodeId# 45 Add nodeId# 218 Enable nodeId# 95 Delete nodeId# 215 Add nodeId# 219 Pick Pick Delete nodeId# 131 Pick Pick Delete nodeId# 63 Pick Delete nodeId# 214 Disable nodeId# 197 Delete nodeId# 158 Add nodeId# 220 Pick Disable 
nodeId# 44 Delete nodeId# 135 Pick Disable nodeId# 219 Add nodeId# 221 Pick Pick Enable nodeId# 198 Delete nodeId# 184 Delete nodeId# 219 Delete nodeId# 220 Disable nodeId# 61 Enable nodeId# 213 Add nodeId# 222 Enable nodeId# 56 Delete nodeId# 206 Enable nodeId# 107 Delete nodeId# 182 Add nodeId# 223 Delete nodeId# 179 Disable nodeId# 123 Delete nodeId# 197 Delete nodeId# 167 Enable nodeId# 48 Disable nodeId# 209 Enable nodeId# 25 Disable nodeId# 42 Disable nodeId# 166 Enable nodeId# 166 Disable nodeId# 65 Add nodeId# 224 Disable nodeId# 104 Delete nodeId# 221 Pick Add nodeId# 225 Disable nodeId# 27 Disable nodeId# 51 Add nodeId# 226 Disable nodeId# 19 Delete nodeId# 204 Disable nodeId# 205 Delete nodeId# 48 Delete nodeId# 57 Add nodeId# 227 Enable nodeId# 209 Add nodeId# 228 Pick Add nodeId# 229 Pick Enable nodeId# 154 Pick Disable nodeId# 198 Enable nodeId# 104 Delete nodeId# 173 Delete nodeId# 42 Enable nodeId# 27 Delete nodeId# 161 Delete nodeId# 155 Delete nodeId# 162 Disable nodeId# 200 Disable nodeId# 176 Enable nodeId# 65 Pick Delete nodeId# 177 Add nodeId# 230 Disable nodeId# 226 Delete nodeId# 212 Disable nodeId# 185 Delete nodeId# 19 Delete nodeId# 202 Disable nodeId# 120 Disable nodeId# 229 Delete nodeId# 213 Delete nodeId# 181 Delete nodeId# 100 Enable nodeId# 51 Delete nodeId# 56 Disable nodeId# 172 Enable nodeId# 44 Add nodeId# 231 Add nodeId# 232 Disable nodeId# 193 Pick Enable nodeId# 120 Pick Disable nodeId# 210 Disable nodeId# 4 Add nodeId# 233 Pick Add nodeId# 234 Add nodeId# 235 Delete nodeId# 123 Pick Enable nodeId# 172 Add nodeId# 236 Pick Delete nodeId# 111 Add nodeId# 237 Pick Enable nodeId# 210 Enable nodeId# 205 Enable nodeId# 186 Disable nodeId# 201 Delete nodeId# 51 Pick Enable nodeId# 176 Disable nodeId# 156 Delete nodeId# 92 Disable nodeId# 205 Disable nodeId# 8 Pick Disable nodeId# 210 Disable nodeId# 104 Disable nodeId# 208 Disable nodeId# 117 Disable nodeId# 165 Enable nodeId# 207 Enable nodeId# 185 Disable nodeId# 237 Pick Disable nodeId# 81 Pick Add nodeId# 238 Disable nodeId# 180 Delete nodeId# 223 Pick Add nodeId# 239 Delete nodeId# 224 Enable nodeId# 64 Delete nodeId# 228 Add nodeId# 240 Add nodeId# 241 Pick Pick Delete nodeId# 192 Pick Add nodeId# 242 Pick Disable nodeId# 196 Enable nodeId# 205 Pick Disable nodeId# 190 Enable nodeId# 229 Delete nodeId# 8 Pick Add nodeId# 243 Pick Disable nodeId# 95 Delete nodeId# 37 Delete nodeId# 235 Pick Add nodeId# 244 Add nodeId# 245 Disable nodeId# 151 Add nodeId# 246 Enable nodeId# 165 Delete nodeId# 194 Disable nodeId# 77 Disable nodeId# 225 Add nodeId# 247 Pick Pick Enable nodeId# 180 Delete nodeId# 233 Add nodeId# 248 Pick Enable nodeId# 237 Pick Enable nodeId# 200 Disable nodeId# 136 Delete ... 
20131 Delete nodeId# 20145 Pick Add nodeId# 20233 Add nodeId# 20234 Delete nodeId# 20180 Add nodeId# 20235 Enable nodeId# 20206 Delete nodeId# 20176 Disable nodeId# 20229 Enable nodeId# 20186 Pick Disable nodeId# 20214 Enable nodeId# 20203 Enable nodeId# 20209 Add nodeId# 20236 Enable nodeId# 20225 Disable nodeId# 20192 Disable nodeId# 20163 Delete nodeId# 20154 Delete nodeId# 20214 Add nodeId# 20237 Disable nodeId# 20220 Add nodeId# 20238 Pick Delete nodeId# 20233 Enable nodeId# 20229 Enable nodeId# 20208 Disable nodeId# 20226 Pick Pick Enable nodeId# 20201 Disable nodeId# 20216 Enable nodeId# 20192 Add nodeId# 20239 Disable nodeId# 20238 Disable nodeId# 20202 Add nodeId# 20240 Pick Disable nodeId# 20092 Disable nodeId# 20192 Add nodeId# 20241 Delete nodeId# 20195 Add nodeId# 20242 Add nodeId# 20243 Disable nodeId# 20125 Disable nodeId# 20235 Disable nodeId# 20204 Delete nodeId# 20224 Add nodeId# 20244 Add nodeId# 20245 Disable nodeId# 20149 Enable nodeId# 20125 Pick Enable nodeId# 20238 Disable nodeId# 20109 Add nodeId# 20246 Enable nodeId# 20204 Disable nodeId# 20245 Disable nodeId# 20187 Disable nodeId# 20207 Delete nodeId# 20206 Pick Disable nodeId# 20246 Pick Pick Add nodeId# 20247 Pick Add nodeId# 20248 Add nodeId# 20249 Delete nodeId# 20228 Enable nodeId# 20226 Delete nodeId# 20202 Enable nodeId# 20235 Add nodeId# 20250 Pick Disable nodeId# 20234 Add nodeId# 20251 Delete nodeId# 20109 Add nodeId# 20252 Delete nodeId# 20236 Enable nodeId# 20223 Delete nodeId# 20188 Pick Delete nodeId# 20182 Disable nodeId# 20244 Pick Pick Enable nodeId# 20207 Enable nodeId# 20246 Delete nodeId# 20247 Disable nodeId# 20217 Delete nodeId# 20125 Enable nodeId# 20245 Add nodeId# 20253 Enable nodeId# 20149 Delete nodeId# 20239 Disable nodeId# 20240 Add nodeId# 20254 Disable nodeId# 20241 Delete nodeId# 20201 Add nodeId# 20255 Disable nodeId# 20246 Disable nodeId# 20251 Enable nodeId# 20234 Add nodeId# 20256 Add nodeId# 20257 Disable nodeId# 20204 Enable nodeId# 20244 Enable nodeId# 20241 Delete nodeId# 20238 Pick Add nodeId# 20258 Pick Pick Pick Add nodeId# 20259 Enable nodeId# 20092 Pick Add nodeId# 20260 Pick Disable nodeId# 20194 Pick Delete nodeId# 20253 Pick Add nodeId# 20261 Add nodeId# 20262 Disable nodeId# 20219 Pick Delete nodeId# 20208 Disable nodeId# 20250 Pick Add nodeId# 20263 Disable nodeId# 20223 Disable nodeId# 20229 Add nodeId# 20264 Disable nodeId# 20222 Disable nodeId# 20227 Enable nodeId# 20219 Add nodeId# 20265 Add nodeId# 20266 Disable nodeId# 20186 Disable nodeId# 20147 Delete nodeId# 20264 Add nodeId# 20267 Delete nodeId# 20246 Disable nodeId# 20225 Enable nodeId# 20220 Enable nodeId# 20187 Pick Pick Enable nodeId# 20251 Enable nodeId# 20223 Enable nodeId# 20194 Pick Pick Disable nodeId# 20220 Disable nodeId# 20267 Add nodeId# 20268 Pick Add nodeId# 20269 Pick Disable nodeId# 20261 Add nodeId# 20270 Delete nodeId# 20255 Add nodeId# 20271 Disable nodeId# 20210 Enable nodeId# 20250 Disable nodeId# 20245 Enable nodeId# 20204 Enable nodeId# 20229 Enable nodeId# 20245 Delete nodeId# 20269 Delete nodeId# 20207 Add nodeId# 20272 Delete nodeId# 20225 Pick Disable nodeId# 20263 Enable nodeId# 20263 Pick Add nodeId# 20273 Delete nodeId# 20204 Disable nodeId# 20265 Enable nodeId# 20267 Pick Pick Delete nodeId# 20254 Add nodeId# 20274 Disable nodeId# 20274 Enable nodeId# 20265 Pick Pick Pick Enable nodeId# 20163 Pick Add nodeId# 20275 Add nodeId# 20276 Pick Disable nodeId# 20241 Delete nodeId# 20273 Delete nodeId# 20210 Add nodeId# 20277 Add nodeId# 20278 Enable nodeId# 20147 Delete nodeId# 
20184 Delete nodeId# 20194 Pick Disable nodeId# 20267 Delete nodeId# 20187 Disable nodeId# 20223 Pick Disable nodeId# 20251 Disable nodeId# 20262 Enable nodeId# 20227 Disable nodeId# 20221 Enable nodeId# 20241 Enable nodeId# 20220 Enable nodeId# 20267 Enable nodeId# 20240 Add nodeId# 20279 Disable nodeId# 20266 Disable nodeId# 20203 Disable nodeId# 20271 Disable nodeId# 20235 Add nodeId# 20280 Enable nodeId# 20266 Enable nodeId# 20271 Pick Delete nodeId# 20209 Disable nodeId# 20215 Disable nodeId# 20278 Delete nodeId# 20232 Enable nodeId# 20215 Disable nodeId# 20272 Pick Add nodeId# 20281 Enable nodeId# 20221 Add nodeId# 20282 Disable nodeId# 20219 Pick Disable nodeId# 20271 Pick Add nodeId# 20283 Add nodeId# 20284 Delete nodeId# 20258 Add nodeId# 20285 Delete nodeId# 20217 Disable nodeId# 20259 Pick Disable nodeId# 20147 Delete nodeId# 20259 Enable nodeId# 20223 Disable nodeId# 20221 Disable nodeId# 20237 Enable nodeId# 20272 Pick Add nodeId# 20286 Delete nodeId# 20147 Add nodeId# 20287 Delete nodeId# 20220 Add nodeId# 20288 Pick Add nodeId# 20289 Enable nodeId# 20262 Add nodeId# 20290 Delete nodeId# 20203 Add nodeId# 20291 Delete nodeId# 20223 Pick Enable nodeId# 20219 Disable nodeId# 20275 Add nodeId# 20292 Add nodeId# 20293 Add nodeId# 20294 Disable nodeId# 20227 Add nodeId# 20295 Pick Delete nodeId# 20279 Pick Add nodeId# 20296 Pick Disable nodeId# 20263 Add nodeId# 20297 Enable nodeId# 20222 Delete nodeId# 20219 Delete nodeId# 20268 Disable nodeId# 20296 Add nodeId# 20298 Delete nodeId# 20291 Disable nodeId# 20298 Disable nodeId# 20231 Pick Disable nodeId# 20215 Enable nodeId# 20261 Pick Disable nodeId# 20249 Add nodeId# 20299 Delete nodeId# 20250 Delete nodeId# 20294 Disable nodeId# 20234 Add nodeId# 20300 Add nodeId# 20301 Add nodeId# 20302 Disable nodeId# 20129 Add nodeId# 20303 Add nodeId# 20304 Enable nodeId# 20274 Enable nodeId# 20215 Pick Delete nodeId# 20226 Add nodeId# 20305 Disable nodeId# 20283 Pick Add nodeId# 20306 Delete nodeId# 20242 Add nodeId# 20307 Enable nodeId# 20237 Disable nodeId# 20229 Pick Pick Delete nodeId# 20306 Disable nodeId# 20284 Enable nodeId# 20283 Delete nodeId# 20222 Add nodeId# 20308 Add nodeId# 20309 Enable nodeId# 20296 Delete nodeId# 20237 Disable nodeId# 20281 Add nodeId# 20310 Delete nodeId# 20186 Pick Enable nodeId# 20231 Add nodeId# 20311 Pick Enable nodeId# 20298 Add nodeId# 20312 Enable nodeId# 20263 Delete nodeId# 20267 Pick Disable nodeId# 20241 Delete nodeId# 20240 Enable nodeId# 20129 Disable nodeId# 20274 Delete nodeId# 20163 Enable nodeId# 20221 Pick Disable nodeId# 20289 Disable nodeId# 20261 Pick Delete nodeId# 20312 Delete nodeId# 20265 Add nodeId# 20313 Pick Add nodeId# 20314 Delete nodeId# 20252 Pick Pick Add nodeId# 20315 Disable nodeId# 20282 Enable nodeId# 20284 Delete nodeId# 20248 Delete nodeId# 20301 Add nodeId# 20316 Enable nodeId# 20241 Add nodeId# 20317 Delete nodeId# 20290 Pick Add nodeId# 20318 Delete nodeId# 20243 Enable nodeId# 20235 Add nodeId# 20319 Enable nodeId# 20261 Disable nodeId# 20241 Enable nodeId# 20227 Enable nodeId# 20216 Delete nodeId# 20318 Add nodeId# 20320 Enable nodeId# 20234 Pick Enable nodeId# 20251 Disable nodeId# 20262 Disable nodeId# 20231 Pick Pick Add nodeId# 20321 Enable nodeId# 20271 Disable nodeId# 20305 Enable nodeId# 20192 Enable nodeId# 20278 Add nodeId# 20322 Add nodeId# 20323 Enable nodeId# 20249 Enable nodeId# 20305 Add nodeId# 20324 Delete nodeId# 20323 Disable nodeId# 20299 Enable nodeId# 20229 Pick Enable nodeId# 20299 Disable nodeId# 20235 Disable nodeId# 20286 Enable nodeId# 
20241 Enable nodeId# 20235 Enable nodeId# 20275 Enable nodeId# 20281 Enable nodeId# 20289 Disable nodeId# 20287 Pick Add nodeId# 20325 Enable nodeId# 20282 Add nodeId# 20326 Delete nodeId# 20227 Disable nodeId# 20300 Enable nodeId# 20262 Pick Enable nodeId# 20287 Pick Pick Disable nodeId# 20229 Delete nodeId# 20305 Delete nodeId# 20249 Enable nodeId# 20231 Disable nodeId# 20251 Disable nodeId# 20313 Delete nodeId# 20300 Enable nodeId# 20313 Enable nodeId# 20286 Delete nodeId# 20274 Enable nodeId# 20251 Enable nodeId# 20229 Pick Add nodeId# 20327 Add nodeId# 20328 Delete nodeId# 20280 Delete nodeId# 20322 Pick Disable nodeId# 20244 Enable nodeId# 20244 Delete nodeId# 20297 Delete nodeId# 20320 Disable nodeId# 20245 Pick Add nodeId# 20329 Add nodeId# 20330 Disable nodeId# 20234 Delete nodeId# 20231 Add nodeId# 20331 Disable nodeId# 20221 Disable nodeId# 20286 Delete nodeId# 20275 Pick Disable nodeId# 20149 Enable nodeId# 20245 Pick Pick Enable nodeId# 20286 Enable nodeId# 20234 Disable nodeId# 20092 Enable nodeId# 20221 Disable nodeId# 20281 Add nodeId# 20332 Delete nodeId# 20179 Add nodeId# 20333 Pick Disable nodeId# 20262 Enable nodeId# 20149 Delete nodeId# 20283 Enable nodeId# 20262 Disable nodeId# 20325 Enable nodeId# 20325 Add nodeId# 20334 Add nodeId# 20335 Delete nodeId# 20310 Add nodeId# 20336 Pick Disable nodeId# 20334 Pick Add nodeId# 20337 Disable nodeId# 20316 Disable nodeId# 20328 Add nodeId# 20338 Delete nodeId# 20216 Pick Add nodeId# 20339 Add nodeId# 20340 Pick Disable nodeId# 20314 Delete nodeId# 20304 Disable nodeId# 20339 Enable nodeId# 20281 Add nodeId# 20341 Disable nodeId# 20244 Enable nodeId# 20328 Pick Delete nodeId# 20205 Enable nodeId# 20339 Delete nodeId# 20329 Disable nodeId# 20257 Add nodeId# 20342 Add nodeId# 20343 Enable nodeId# 20257 Enable nodeId# 20244 Add nodeId# 20344 Delete nodeId# 20241 Enable nodeId# 20334 Add nodeId# 20345 Pick Enable nodeId# 20092 Delete nodeId# 20334 Disable nodeId# 20149 Pick Enable nodeId# 20149 Enable nodeId# 20314 Add nodeId# 20346 Add nodeId# 20347 Disable nodeId# 20317 Add nodeId# 20348 Delete nodeId# 20292 Delete nodeId# 20270 Enable nodeId# 20316 Pick Pick Add nodeId# 20349 Add nodeId# 20350 Delete nodeId# 20346 Delete nodeId# 20313 Pick Delete nodeId# 20281 Delete nodeId# 20276 Pick Add nodeId# 20351 Delete nodeId# 20343 Disable nodeId# 20335 Disable nodeId# 20350 Disable nodeId# 20257 Add nodeId# 20352 Enable nodeId# 20257 Add nodeId# 20353 Enable nodeId# 20317 Enable nodeId# 20335 Disable nodeId# 20341 Add nodeId# 20354 Delete nodeId# 20330 Add nodeId# 20355 Pick Add nodeId# 20356 Pick Enable nodeId# 20341 Enable nodeId# 20350 Delete nodeId# 20327 Delete nodeId# 20245 Pick Add nodeId# 20357 Delete nodeId# 20351 Disable nodeId# 20285 Delete nodeId# 20284 Enable nodeId# 20285 Delete nodeId# 20092 Pick Delete nodeId# 20311 Pick Delete nodeId# 20296 Delete nodeId# 20357 Pick Add nodeId# 20358 Delete nodeId# 20314 Pick Delete nodeId# 20358 Disable nodeId# 20149 Disable nodeId# 20271 Disable nodeId# 20261 Disable nodeId# 20129 Enable nodeId# 20149 Disable nodeId# 20315 Enable nodeId# 20261 Pick Delete nodeId# 20295 Delete nodeId# 20319 Add nodeId# 20359 Delete nodeId# 20333 Delete nodeId# 20126 Pick Disable nodeId# 20355 Enable nodeId# 20129 Delete nodeId# 20348 Enable nodeId# 20315 Pick Pick Pick Add nodeId# 20360 Pick Add nodeId# 20361 Disable nodeId# 20234 Pick Enable nodeId# 20271 Pick Add nodeId# 20362 Disable nodeId# 20325 Delete nodeId# 20316 Disable nodeId# 20339 Add nodeId# 20363 Delete nodeId# 20149 Pick Disable 
nodeId# 20328 Enable nodeId# 20339 Pick Pick Add nodeId# 20364 Delete nodeId# 20288 Enable nodeId# 20355 Delete nodeId# 20307 Pick Pick Enable nodeId# 20325 Disable nodeId# 20256 Enable nodeId# 20234 Add nodeId# 20365 Enable nodeId# 20328 Add nodeId# 20366 Add nodeId# 20367 Delete nodeId# 20293 Disable nodeId# 20218 Pick Pick Add nodeId# 20368 Pick Add nodeId# 20369 Enable nodeId# 20256 Disable nodeId# 20365 Disable nodeId# 20350 Disable nodeId# 20289 Pick Disable nodeId# 20356 Add nodeId# 20370 Enable nodeId# 20218 Add nodeId# 20371 Delete nodeId# 20352 Disable nodeId# 20317 Disable nodeId# 20321 Pick Add nodeId# 20372 Pick Delete nodeId# 20196 Delete nodeId# 20328 Delete nodeId# 20321 Disable nodeId# 20345 Delete nodeId# 20336 Delete nodeId# 20350 Add nodeId# 20373 Disable nodeId# 20309 Disable nodeId# 20218 Enable nodeId# 20365 Disable nodeId# 20192 Disable nodeId# 20215 Delete nodeId# 20308 Delete nodeId# 20256 Add nodeId# 20374 Pick Pick Pick Disable nodeId# 20366 Disable nodeId# 20230 Pick Enable nodeId# 20215 Enable nodeId# 20356 Delete nodeId# 20218 Delete nodeId# 20289 Add nodeId# 20375 Pick Enable nodeId# 20366 Pick Disable nodeId# 20363 Enable nodeId# 20317 Delete nodeId# 20340 Enable nodeId# 20309 Enable nodeId# 20192 Pick Enable nodeId# 20363 Disable nodeId# 20368 Delete nodeId# 20303 Disable nodeId# 20282 Pick >> KqpUniqueIndex::InsertFkPartialColumnSet [GOOD] >> KqpUniqueIndex::InsertFkPkOverlap >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId >> DataShardReadIteratorBatchMode::SelectingColumns [GOOD] >> DataShardReadIteratorBatchMode::ShouldHandleReadAck >> DataShardReadIteratorSysTables::ShouldNotAllowArrow [GOOD] >> ReadIteratorExternalBlobs::ExtBlobs >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> KqpMultishardIndex::SecondaryIndexSelectNull [GOOD] >> KqpMultishardIndex::SortByPk+StreamLookup >> KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex [GOOD] >> KqpIndexes::DeleteByIndex |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> SystemView::GroupsFields [GOOD] >> SystemView::DescribeSystemFolder >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:00.644929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:00.645037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:00.645080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:00.645141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:00.645188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:00.645220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:00.645284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:00.645668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:00.728058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:00.728128Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:00.740200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:00.743874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:00.744100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:00.755093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:00.755831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:00.756577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:00.756943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:00.761653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:00.763173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:00.763238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:00.763513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:00.763581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:00.763635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2024-11-21T23:16:00.763733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:00.777455Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:00.891981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:00.892289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:00.892576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:00.892855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:00.892947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:00.899061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:00.899239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:00.899558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:00.899635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:00.899704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:00.899752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:00.902663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:00.902739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:00.902784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:00.904946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:00.905009Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:00.905055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:00.905115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:00.909243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:00.911401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:00.911640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:00.912680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:00.912818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:00.912877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:00.913181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:00.913235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:00.913404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:00.913515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:00.916093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:00.916150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:00.916333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:00.916376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:00.916766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:00.916844Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:00.916991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:00.917029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:00.917109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:00.917151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:00.917206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:00.917249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 
2024-11-21T23:16:00.917320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:00.917374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:00.917415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:00.919634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:00.919757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:00.919814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:00.919865Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:00.919917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:00.920029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... shard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 3 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2024-11-21T23:16:01.414005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:16:01.414115Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:595:2527], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:16:01.414415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2024-11-21T23:16:01.414463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2024-11-21T23:16:01.414597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2024-11-21T23:16:01.414632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:690:2594], at schemeshard: 72075186234409546, txId: 0, path id: 1 2024-11-21T23:16:01.415705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2024-11-21T23:16:01.416136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640 2024-11-21T23:16:01.416273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2024-11-21T23:16:01.417036Z node 1 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2024-11-21T23:16:01.417122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2024-11-21T23:16:01.417164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2024-11-21T23:16:01.417266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2024-11-21T23:16:01.417310Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2024-11-21T23:16:01.417347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 138 -> 240 2024-11-21T23:16:01.418108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T23:16:01.418220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:16:01.433087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.433351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.433407Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2024-11-21T23:16:01.433543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2024-11-21T23:16:01.433599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T23:16:01.433657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2024-11-21T23:16:01.433758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T23:16:01.433968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T23:16:01.434011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T23:16:01.434690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:16:01.439671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T23:16:01.439732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T23:16:01.440338Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T23:16:01.440446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T23:16:01.440489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:830:2714] TestWaitNotification: OK eventTxId 106 2024-11-21T23:16:01.441281Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:01.441544Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 331us result status StatusSuccess 2024-11-21T23:16:01.441989Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:01.443998Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2024-11-21T23:16:01.444191Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/ServerLess0" took 218us result status StatusSuccess 2024-11-21T23:16:01.444556Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 
Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 2024-11-21T23:16:01.445350Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:01.445510Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 167us result status StatusSuccess 2024-11-21T23:16:01.445811Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:01.446347Z node 1 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3 >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:00.930012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# 
no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:00.930124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:00.930167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:00.930214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:00.930260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:00.930290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:00.930350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:00.931331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:01.022230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:01.022299Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:01.052614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:01.056811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:01.057043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:01.070314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:01.070983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:01.071705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:01.072065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:01.076340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:01.077691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:01.077755Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:01.077978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:01.078034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:01.078075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:01.078162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.085025Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:01.331576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:01.331874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.332138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:01.332428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:01.332516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.347407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:01.347593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:01.347830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.347891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:01.347951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:01.347987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:01.355354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.355438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:01.355496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:01.363489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.363577Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.363626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:01.363699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:01.378028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:01.383922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:01.384183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: 
Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:01.385303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:01.385478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:01.385546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:01.385847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:01.385900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:01.386449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:01.386588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:01.396041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:01.396119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:01.396373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:01.396424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:01.396880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:01.396962Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:01.397077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:01.397125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:01.397176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:01.397214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:01.397270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:01.397306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:01.397406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:01.397451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:01.397487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:01.399647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:01.399805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:01.399865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:01.399923Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:01.399968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:01.400072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 024-11-21T23:16:02.013094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:02.013141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet 72057594046678944 2024-11-21T23:16:02.013195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2024-11-21T23:16:02.013365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:02.015620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T23:16:02.015769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2024-11-21T23:16:02.016172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:02.016308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:02.016377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet 72057594046678944 2024-11-21T23:16:02.016697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T23:16:02.016762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet 72057594046678944 2024-11-21T23:16:02.016884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:16:02.017128Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, 
LocalPathId: 3], Generation: 2, ActorId:[1:599:2528], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T23:16:02.019841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:02.019897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:16:02.020088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:02.020121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T23:16:02.020394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:02.020450Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2024-11-21T23:16:02.020484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2024-11-21T23:16:02.021095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:16:02.021181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:16:02.021234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T23:16:02.021287Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T23:16:02.021348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T23:16:02.021429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T23:16:02.029772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:02.029860Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T23:16:02.029979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T23:16:02.030021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:16:02.030076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T23:16:02.030123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:16:02.030208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 
2024-11-21T23:16:02.030255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T23:16:02.033131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:16:02.034198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T23:16:02.042849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T23:16:02.042916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T23:16:02.043433Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:16:02.043555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:16:02.043596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:753:2637] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T23:16:02.050227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:02.050452Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2024-11-21T23:16:02.050497Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2024-11-21T23:16:02.050661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2024-11-21T23:16:02.050732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2024-11-21T23:16:02.077393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:02.077598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2024-11-21T23:16:02.080489Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:02.080656Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2024-11-21T23:16:02.080699Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2024-11-21T23:16:02.080866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2024-11-21T23:16:02.080918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2024-11-21T23:16:02.092021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:02.092229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 >> SystemView::Describe [GOOD] >> SystemView::DescribeAccessDenied >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::CDC_Write |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> SystemView::StoragePoolsRanges [GOOD] >> SystemView::SystemViewFailOps >> TSchemeShardViewTest::EmptyName >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery >> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD] >> DataShardReadIterator::ShouldFailUknownColumns >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAttributesExtSubdomain >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain >> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardViewTest::CreateView 
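The TestServerlessComputeResourcesModeValidation trace above (ending with "TestModificationResult got TxId: 106") shows two rejected ALTER DATABASE proposals: txId 105 on /MyRoot/SharedDB, a non-serverless database, fails with "ServerlessComputeResourcesMode can be changed only for serverless", and txId 106 on /MyRoot/ServerLess0 fails with "can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified". The C++ fragment below is a minimal sketch that only summarizes the order of those two observed checks; it is not the schemeshard's actual implementation, and any enum value other than Unspecified/Shared is an assumption.

#include <string>

// Sketch of the two checks observed above (txId 105 and txId 106); illustrative only.
enum class EServerlessComputeResourcesMode {
    Unspecified,
    Shared,
    Exclusive, // assumed additional mode, not taken from this log
};

struct TCheckResult {
    bool Accepted;
    std::string Reason;
};

TCheckResult CheckAlterComputeResourcesMode(bool pathIsServerless,
                                            EServerlessComputeResourcesMode requested) {
    if (!pathIsServerless) {
        // txId 105 on /MyRoot/SharedDB was rejected with StatusInvalidParameter and this reason.
        return {false, "ServerlessComputeResourcesMode can be changed only for serverless"};
    }
    if (requested == EServerlessComputeResourcesMode::Unspecified) {
        // txId 106 on /MyRoot/ServerLess0 was rejected with StatusInvalidParameter and this reason.
        return {false, "can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified"};
    }
    return {true, ""};
}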
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:2964:2106] recipient: [1:2845:2115] Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2967:2106] recipient: [1:2845:2115] 2024-11-21T23:15:28.467566Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:28.473652Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:28.475909Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:28.476366Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:28.477095Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:28.477128Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:28.477430Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:28.487056Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:28.487191Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:28.487341Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:28.487488Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:28.487602Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:28.487686Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:2966:2117] sender: [1:2992:2106] recipient: [1:60:2107] 2024-11-21T23:15:28.500243Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:28.500416Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:28.511724Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:28.511873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:28.511940Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:28.512025Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:28.512120Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:28.512177Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:28.512230Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:28.512297Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:28.523221Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:28.523368Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:28.524892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:28.524985Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:28.525118Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:28.549827Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2024-11-21T23:15:28.552113Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1000 Path# /dev/disk3 2024-11-21T23:15:28.552186Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 
20:1001 Path# /dev/disk2 2024-11-21T23:15:28.552227Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 20:1002 Path# /dev/disk1 2024-11-21T23:15:28.552256Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1000 Path# /dev/disk2 2024-11-21T23:15:28.552282Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1001 Path# /dev/disk1 2024-11-21T23:15:28.552316Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1000 Path# /dev/disk3 2024-11-21T23:15:28.552344Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2024-11-21T23:15:28.552371Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 18:1002 Path# /dev/disk1 2024-11-21T23:15:28.552393Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1000 Path# /dev/disk3 2024-11-21T23:15:28.552441Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2024-11-21T23:15:28.552471Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 17:1002 Path# /dev/disk1 2024-11-21T23:15:28.552494Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1000 Path# /dev/disk3 2024-11-21T23:15:28.552534Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2024-11-21T23:15:28.552559Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 16:1001 Path# /dev/disk1 2024-11-21T23:15:28.552585Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1000 Path# /dev/disk2 2024-11-21T23:15:28.552608Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1000 Path# /dev/disk3 2024-11-21T23:15:28.552631Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2024-11-21T23:15:28.552654Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 13:1000 Path# /dev/disk2 2024-11-21T23:15:28.552680Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 12:1000 Path# /dev/disk2 2024-11-21T23:15:28.552703Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1000 Path# /dev/disk2 2024-11-21T23:15:28.552730Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1000 Path# /dev/disk2 2024-11-21T23:15:28.552765Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 11:1001 Path# /dev/disk1 2024-11-21T23:15:28.552792Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1000 Path# /dev/disk3 2024-11-21T23:15:28.552815Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2024-11-21T23:15:28.552850Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1001 Path# /dev/disk1 2024-11-21T23:15:28.552873Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1000 Path# /dev/disk3 2024-11-21T23:15:28.552893Z node 1 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2024-11-21T23:15:28.552915Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 3:1000 Path# /dev/disk3 2024-11-21T23:15:28.552941Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 6:1000 Path# /dev/disk2 2024-11-21T23:15:28.553046Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 15:1001 Path# /dev/disk1 2024-11-21T23:15:28.553077Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1000 Path# /dev/disk3 2024-11-21T23:15:28.553114Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2024-11-21T23:15:28.553139Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 9:1002 Path# /dev/disk1 2024-11-21T23:15:28.553175Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 2:1000 Path# /dev/disk2 2024-11-21T23:15:28.553196Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 7:1000 Path# /dev/disk3 2024-11-21T23:15:28.553218Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2024-11-21T23:15:28.553240Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 4:1000 Path# /dev/ ... p:355} Create new pdisk PDiskId# 167:1002 Path# /dev/disk3 2024-11-21T23:15:56.159791Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 170:1002 Path# /dev/disk3 2024-11-21T23:15:56.159814Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1000 Path# /dev/disk1 2024-11-21T23:15:56.159836Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1000 Path# /dev/disk1 2024-11-21T23:15:56.159862Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2024-11-21T23:15:56.159885Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1001 Path# /dev/disk2 2024-11-21T23:15:56.159907Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 172:1002 Path# /dev/disk1 2024-11-21T23:15:56.159932Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1001 Path# /dev/disk2 2024-11-21T23:15:56.159955Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 173:1002 Path# /dev/disk1 2024-11-21T23:15:56.159979Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 163:1002 Path# /dev/disk3 2024-11-21T23:15:56.160004Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 192:1002 Path# /dev/disk2 2024-11-21T23:15:56.160026Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1001 Path# /dev/disk3 2024-11-21T23:15:56.160049Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 189:1002 Path# /dev/disk1 2024-11-21T23:15:56.160072Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2024-11-21T23:15:56.160096Z node 161 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 162:1002 Path# /dev/disk2 2024-11-21T23:15:56.160119Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 175:1002 Path# /dev/disk3 2024-11-21T23:15:56.160144Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2024-11-21T23:15:56.160166Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1001 Path# /dev/disk2 2024-11-21T23:15:56.160188Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 161:1002 Path# /dev/disk3 2024-11-21T23:15:56.433915Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.281044s 2024-11-21T23:15:56.434111Z node 161 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.281267s 2024-11-21T23:15:56.464783Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2024-11-21T23:15:56.466736Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1000 Path# /dev/disk2 2024-11-21T23:15:56.466804Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1001 Path# /dev/disk1 2024-11-21T23:15:56.466831Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1000 Path# /dev/disk2 2024-11-21T23:15:56.466857Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1001 Path# /dev/disk1 2024-11-21T23:15:56.466886Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1000 Path# /dev/disk2 2024-11-21T23:15:56.466912Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1001 Path# /dev/disk1 2024-11-21T23:15:56.466938Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1000 Path# /dev/disk2 2024-11-21T23:15:56.466964Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1001 Path# /dev/disk1 2024-11-21T23:15:56.466991Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1000 Path# /dev/disk2 2024-11-21T23:15:56.467018Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1001 Path# /dev/disk1 2024-11-21T23:15:56.467044Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1000 Path# /dev/disk2 2024-11-21T23:15:56.467067Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1001 Path# /dev/disk1 2024-11-21T23:15:56.467108Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 
214:1000 Path# /dev/disk2 2024-11-21T23:15:56.467142Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1001 Path# /dev/disk1 2024-11-21T23:15:56.467167Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1000 Path# /dev/disk3 2024-11-21T23:15:56.467192Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2024-11-21T23:15:56.467218Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 213:1002 Path# /dev/disk1 2024-11-21T23:15:56.467244Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1000 Path# /dev/disk3 2024-11-21T23:15:56.467270Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2024-11-21T23:15:56.467295Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 212:1002 Path# /dev/disk1 2024-11-21T23:15:56.467319Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1000 Path# /dev/disk3 2024-11-21T23:15:56.467343Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2024-11-21T23:15:56.467366Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 211:1002 Path# /dev/disk1 2024-11-21T23:15:56.467538Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2024-11-21T23:15:56.467575Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 2024-11-21T23:15:56.467603Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2024-11-21T23:15:56.467648Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2024-11-21T23:15:56.467674Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2024-11-21T23:15:56.469569Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2024-11-21T23:15:56.469646Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2024-11-21T23:15:56.565583Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.101669s 2024-11-21T23:15:56.565791Z node 161 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.101912s >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:05.762726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact 
single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:05.762819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:05.762856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:05.762903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:05.762954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:05.762989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:05.763044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:05.763349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:05.848056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:05.848110Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:05.868227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:05.873252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:05.873464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:05.883126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:05.884665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:05.885513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:05.885893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:05.894955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:05.896398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:05.896482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:05.896726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:05.896785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:05.896829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:05.896932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:05.913779Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:06.081377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:06.081633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:06.081867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:06.082116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:06.082182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:06.090044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:06.090220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:06.090475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:06.090565Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:06.090616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:06.090654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:06.100188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:06.100289Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:06.100331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:06.102718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:06.102787Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:06.102834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:06.102885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:06.107070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:06.109438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:06.109666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:06.110988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:06.111167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:06.111228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:06.111640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:06.111718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:06.111947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:06.112092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:06.114719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:06.114773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:06.115013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:06.115069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:06.115602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:06.115658Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:06.115775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:06.115820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:06.115902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:06.115961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:06.116016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:06.116056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:06.116134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:06.116179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:06.116234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:06.118281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:06.118470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:06.118511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:06.118562Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:06.118624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:06.118752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:16:06.122636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:16:06.123257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:16:06.123989Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:16:06.140976Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:16:06.143834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:06.144063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2024-11-21T23:16:06.144169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2024-11-21T23:16:06.144272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2024-11-21T23:16:06.145974Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:16:06.149411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:06.149645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2024-11-21T23:16:06.150152Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |85.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |85.5%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 25342, MsgBus: 22703 2024-11-21T23:15:50.634215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875282099402128:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:50.634860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001425/r3tmp/tmpJOUPeG/pdisk_1.dat 2024-11-21T23:15:51.273041Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:51.275657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:51.275732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:51.280171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25342, node 1 2024-11-21T23:15:51.383125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:51.383159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:51.383168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:51.383260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22703 TClient is connected to server localhost:22703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:15:51.970154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.002180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.177782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.357584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.449876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:54.424270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875299279272877:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.424366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.768737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.815499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.847474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.921658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.949763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.990653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.082747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875303574240672:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.082839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.083168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875303574240677:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.087288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:55.097381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875303574240679:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:15:55.682182Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875282099402128:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:55.682462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:56.124349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 16939, MsgBus: 61056 2024-11-21T23:15:58.869861Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875315882537603:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:58.869926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001425/r3tmp/tmpSDy3UJ/pdisk_1.dat 2024-11-21T23:15:59.040357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:59.040440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:59.046891Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:59.047153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16939, node 2 2024-11-21T23:15:59.116239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:59.116262Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:59.116269Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:59.116377Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61056 TClient is connected to server localhost:61056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T23:15:59.642541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:59.673021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:59.734384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:59.879143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:59.962798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:02.491729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875333062408460:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:02.491844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:02.541823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:02.584497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:02.635192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:02.706882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:02.786065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:02.863519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:02.921702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875333062408964:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:02.921792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:02.922040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875333062408969:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:02.930535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:02.947065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875333062408971:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:16:03.886765Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875315882537603:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:03.887045Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:03.943075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:07.104322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:07.104417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:07.104455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:07.104507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:07.104552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:07.104595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:07.104653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:07.104981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:07.488751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:07.488809Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:07.511518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:07.515709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:07.515891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:07.538049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:07.538916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:07.539579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:07.539921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:07.544952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2024-11-21T23:16:07.546245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:07.546314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:07.546572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:07.546623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:07.546659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:07.546745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.553522Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:07.696000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:07.696240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.696451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:07.696673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:07.696739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.699685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:07.699845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:07.700033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.700108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:07.700166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:07.700207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:07.702207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.702271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:07.702310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:07.704002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.704050Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.704094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:07.704136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:07.707888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:07.709812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:07.710002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:07.711012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:07.711132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:07.711173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:07.711457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:07.711509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:07.711697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:07.711790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:07.713777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:07.713818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:07.713974Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:07.714015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:07.714364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.714431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2024-11-21T23:16:07.714533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:07.714567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:07.714605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:07.714645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:07.714690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:07.714720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:07.714782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:07.714818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:07.714852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:07.716570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:07.716669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:07.716701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:07.716748Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:07.716795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:07.716878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
cute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "Some query" } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:07.738633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2024-11-21T23:16:07.738704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2024-11-21T23:16:07.738815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:16:07.738878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T23:16:07.738927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:07.740253Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:16:07.745845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T23:16:07.746002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T23:16:07.746353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.746443Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2024-11-21T23:16:07.746497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2024-11-21T23:16:07.746610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:07.747116Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:16:07.748536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2024-11-21T23:16:07.748659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2024-11-21T23:16:07.749027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:07.749151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 
Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:07.749202Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T23:16:07.749317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2024-11-21T23:16:07.749484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:07.749544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 100 2024-11-21T23:16:07.751372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:07.751411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:07.751580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:16:07.751670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:07.751724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2024-11-21T23:16:07.751777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2024-11-21T23:16:07.752081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.752124Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2024-11-21T23:16:07.752236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2024-11-21T23:16:07.752265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T23:16:07.752305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2024-11-21T23:16:07.752346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2024-11-21T23:16:07.752383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2024-11-21T23:16:07.752423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2024-11-21T23:16:07.752492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:16:07.752535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2024-11-21T23:16:07.752562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T23:16:07.752585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T23:16:07.753300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:16:07.753388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:16:07.753420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2024-11-21T23:16:07.753461Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T23:16:07.753499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:07.754059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:16:07.754137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2024-11-21T23:16:07.754164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2024-11-21T23:16:07.754189Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T23:16:07.754213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:16:07.754266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2024-11-21T23:16:07.756629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2024-11-21T23:16:07.758105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2024-11-21T23:16:07.758312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T23:16:07.758351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2024-11-21T23:16:07.758735Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T23:16:07.758817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:16:07.758865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:296:2288] TestWaitNotification: OK eventTxId 101 2024-11-21T23:16:07.759294Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:07.759470Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 174us result status StatusSuccess 
2024-11-21T23:16:07.759801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks >> KqpMultishardIndex::SortByPk+StreamLookup [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> SystemView::QueryStats [GOOD] >> SystemView::QueryStatsFields >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> TContinuousBackupTests::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2024-11-21T23:15:44.139119Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875254738482558:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:44.144191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000fcd/r3tmp/tmpPHVsO0/pdisk_1.dat 2024-11-21T23:15:45.011238Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:45.019133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:45.019221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:45.037282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11613, node 1 2024-11-21T23:15:45.298372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:45.298414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:45.298422Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:45.298508Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15035 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:46.046971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:46.071406Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:15:46.076268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:49.011025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875276213319731:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.011513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.011826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875276213319758:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.016609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:15:49.035217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875276213319760:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:15:49.140498Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875254738482558:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:49.140544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:49.552667Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jd8g57kbcfdrgw93sbvtdmwe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJkYTk5ZDMtNzMwYzAxYTYtNTg5OTNjMDUtMzJiZDhkZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:49.569355Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439875276213319846:2317], owner: [1:7439875276213319843:2315], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:15:49.583072Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439875276213319846:2317], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:15:49.583999Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439875276213319846:2317], row count: 1, finished: 1 2024-11-21T23:15:49.584033Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439875276213319846:2317], owner: [1:7439875276213319843:2315], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:15:49.631828Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230949551, txId: 281474976710661] shutting down 2024-11-21T23:15:50.667036Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875279617581174:2051];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000fcd/r3tmp/tmpP6k4nA/pdisk_1.dat 2024-11-21T23:15:50.728702Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:50.834145Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:50.859760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:50.859839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:50.863755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5252, node 2 2024-11-21T23:15:50.961353Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:50.961376Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:50.961399Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:50.961500Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14400 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:51.367854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.377542Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:15:54.428140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.535209Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875296797451090:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.535301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.535655Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875296797451102:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.547599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:15:54.558900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875296797451104:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:15:54.755073Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8g5fh25e79fbt8v3974f6t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2M1ZDRlMzQtMWQ0M2FiMGYtNDc0MTkzYTktY2U4NDljNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:54.756858Z node 2 :SYSTEM_VIEWS INFO: Scan started, actor: [2:7439875296797451193:2322], owner: [2:7439875296797451189:2320], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:15:54.757406Z node 2 :SYSTEM_VIEWS INFO: Scan prepared, actor: [2:7439875296797451193:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:15:54.757900Z node 2 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [2:7439875296797451193:2322], row count: 4, finished: 1 2024-11-21T23:15: ... { TraceId: 01jd8g5nkw2164k8591mmhgtsa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjM2NTgxMTctYzFiNzI1NjctYWYyZDgwNzAtMzdlYWQzZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:00.917957Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439875323096435170:2392], owner: [3:7439875323096435167:2390], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:00.920835Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439875323096435170:2392], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:00.926252Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439875323096435170:2392], row count: 3, finished: 1 2024-11-21T23:16:00.930352Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439875323096435170:2392], owner: [3:7439875323096435167:2390], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:00.939159Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230960914, txId: 281474976715673] shutting down 2024-11-21T23:16:01.080599Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jd8g5nsm4cb8zktvmn8frpjv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzljYTk3NWUtMmEyNGFiMDctZmZjM2NlMGQtNzVjYjU5ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:16:01.083828Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439875327391402500:2401], owner: [3:7439875327391402496:2399], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:01.092372Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439875327391402500:2401], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:01.094475Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439875327391402500:2401], row count: 3, finished: 1 2024-11-21T23:16:01.094523Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439875327391402500:2401], owner: [3:7439875327391402496:2399], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:01.097358Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230961077, txId: 281474976715675] shutting down 2024-11-21T23:16:01.239059Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jd8g5nykb80mb5dsvpzzbzt6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZjViMWZmNGUtYjU0OTdiYzUtZmY1YTdmYjAtZWRlNmQ2MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:01.241062Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439875327391402535:2411], owner: [3:7439875327391402531:2409], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:01.246733Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439875327391402535:2411], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:01.247250Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439875327391402535:2411], row count: 4, finished: 1 2024-11-21T23:16:01.247287Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439875327391402535:2411], owner: [3:7439875327391402531:2409], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:01.249720Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230961234, txId: 281474976715677] shutting down 2024-11-21T23:16:01.436807Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jd8g5p3c733da7c9814wjsx1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTVkYTM4MjYtOTM5MGM1M2YtYTE3NTg2NmMtNGY1MmE4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:16:01.452402Z node 3 :SYSTEM_VIEWS INFO: Scan started, actor: [3:7439875327391402569:2421], owner: [3:7439875327391402565:2419], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:01.453124Z node 3 :SYSTEM_VIEWS INFO: Scan prepared, actor: [3:7439875327391402569:2421], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:01.453644Z node 3 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [3:7439875327391402569:2421], row count: 4, finished: 1 2024-11-21T23:16:01.453682Z node 3 :SYSTEM_VIEWS INFO: Scan finished, actor: [3:7439875327391402569:2421], owner: [3:7439875327391402565:2419], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:01.457246Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230961435, txId: 281474976715679] shutting down 2024-11-21T23:16:02.329371Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439875333055253196:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:02.329861Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000fcd/r3tmp/tmp9FY5Xp/pdisk_1.dat 2024-11-21T23:16:02.453713Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:02.482243Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:02.482336Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:02.484123Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4152, node 4 2024-11-21T23:16:02.546323Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:02.546346Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:02.546355Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:02.546467Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:16:02.810936Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:02.819548Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:16:06.123284Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:16:06.402919Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875350235123248:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:06.402998Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:06.406564Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875350235123260:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:06.418300Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:16:06.432577Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439875350235123262:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:16:06.889096Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jd8g5v3sfd4wz426pj9dehdv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MjRiYjVmYzUtZDc0ZjAzY2QtNmUyNmFjOS1iMzVhYTQxNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:06.895021Z node 4 :SYSTEM_VIEWS INFO: Scan started, actor: [4:7439875350235123360:2336], owner: [4:7439875350235123359:2335], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:06.898904Z node 4 :SYSTEM_VIEWS INFO: Scan prepared, actor: [4:7439875350235123360:2336], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:06.901091Z node 4 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [4:7439875350235123360:2336], row count: 4, finished: 1 2024-11-21T23:16:06.901154Z node 4 :SYSTEM_VIEWS INFO: Scan finished, actor: [4:7439875350235123360:2336], owner: [4:7439875350235123359:2335], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:06.901398Z node 4 :SYSTEM_VIEWS INFO: Scan started, actor: [4:7439875350235123373:2339], owner: [4:7439875350235123359:2335], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:06.903297Z node 4 :SYSTEM_VIEWS INFO: Scan prepared, actor: [4:7439875350235123373:2339], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:06.903975Z node 4 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [4:7439875350235123373:2339], row count: 4, finished: 1 2024-11-21T23:16:06.904026Z node 4 :SYSTEM_VIEWS INFO: Scan finished, actor: [4:7439875350235123373:2339], owner: [4:7439875350235123359:2335], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:06.906281Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230966886, txId: 281474976710661] shutting down >> KqpIndexes::DeleteByIndex [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:07.740282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:07.740378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:07.740436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:07.740476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:07.740533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:07.740563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:07.740621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:07.741095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:07.818223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:07.818273Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:07.828865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:07.831664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:07.831863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:07.837342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:07.842762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:07.843512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:07.844118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:07.856903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:07.858452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:07.858527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:07.859000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:07.859051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:07.859084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:07.859181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:07.866718Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:08.031835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:08.032078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:08.032281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:08.032490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:08.032551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:08.037377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:08.037531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:08.037781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:08.037851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:08.037896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:08.037943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:08.040761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:08.040845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:08.040893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:08.042683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:08.042732Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:08.042766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:08.042839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:08.046231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:08.060804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:08.061076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:08.062121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:08.062269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { 
RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:08.062316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:08.062831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:08.062915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:08.063124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:08.063242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:08.069998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:08.070062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:08.070241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:08.070280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:08.070873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:08.070935Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:08.071034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:08.071065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:08.071112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:08.071160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:08.071201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:08.071251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:08.071334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:08.071393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:08.071431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:08.073326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:08.073436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:08.073495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at 
schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:08.073550Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:08.073590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:08.073702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... :1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:09.036789Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T23:16:09.036842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:1, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:16:09.036882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2024-11-21T23:16:09.039052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T23:16:09.040028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T23:16:09.040150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2024-11-21T23:16:09.040191Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2024-11-21T23:16:09.040312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2024-11-21T23:16:09.040350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2024-11-21T23:16:09.040395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2024-11-21T23:16:09.040467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 103 2024-11-21T23:16:09.040515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2024-11-21T23:16:09.040579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:16:09.040626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:16:09.040706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2024-11-21T23:16:09.040740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T23:16:09.040759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T23:16:09.040845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:16:09.040893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2024-11-21T23:16:09.040924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2024-11-21T23:16:09.040974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2024-11-21T23:16:09.041009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 103:3 2024-11-21T23:16:09.041031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2024-11-21T23:16:09.041072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:16:09.042836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:16:09.042916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:755:2634] TestWaitNotification: OK eventTxId 103 2024-11-21T23:16:09.043516Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:16:09.043766Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 281us result status StatusSuccess 2024-11-21T23:16:09.044250Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:09.044767Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:16:09.045003Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 212us result status StatusSuccess 2024-11-21T23:16:09.045503Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:09.046523Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:16:09.046719Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 228us result status StatusSuccess 2024-11-21T23:16:09.047118Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { 
Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 1811, MsgBus: 15441 2024-11-21T23:15:49.162634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875275617945809:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:49.162694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001429/r3tmp/tmpCAvFgJ/pdisk_1.dat 2024-11-21T23:15:50.000602Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:50.028128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:50.028208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:50.035658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1811, node 1 2024-11-21T23:15:50.308924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:50.308946Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:50.308953Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:50.309042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15441 TClient is connected to server localhost:15441 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:51.358824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.384223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.601643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.792513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.872691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:53.675349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875292797816700:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.675495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.903584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.943563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.990208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.046772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.104425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.164559Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875275617945809:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:54.170175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:54.193889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.271200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875297092784499:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.271320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.271541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875297092784505:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.275041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:54.289146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875297092784507:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:15:55.502804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 7645, MsgBus: 14239 2024-11-21T23:15:59.861880Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875321992204615:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:59.861981Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001429/r3tmp/tmpXuCHuV/pdisk_1.dat 2024-11-21T23:16:00.011593Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:00.025135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:00.025215Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:00.027881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7645, node 2 2024-11-21T23:16:00.134972Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:00.135001Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:00.135008Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:00.135108Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14239 TClient is connected to server localhost:14239 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:00.693554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:00.701598Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:16:00.714821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:16:00.816116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:01.118650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:01.213423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:03.663644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875339172075469:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:03.663774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:03.705628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.749995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.798662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.845064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.889016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.937025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:04.048694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875343467043266:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:04.048824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:04.049155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875343467043271:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:04.053920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:04.072268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875343467043273:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:16:04.875339Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875321992204615:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:04.875402Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:05.285819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 9838, MsgBus: 27006 2024-11-21T23:15:50.921220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875281952864304:2185];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:50.921452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001418/r3tmp/tmp9HEmez/pdisk_1.dat 2024-11-21T23:15:51.411480Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:51.413591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:51.413692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:51.417584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9838, node 1 2024-11-21T23:15:51.650918Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:51.650942Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:51.650962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:51.651048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27006 TClient is connected to server localhost:27006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:15:52.360317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:52.399078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:15:52.573162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.772586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.862918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:54.899510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875299132735026:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.969313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.170599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.196347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.222324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.253608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.305114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.382896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.480336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875303427702826:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.480392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.480570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875303427702831:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.483578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:55.496172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875303427702833:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:15:55.912188Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875281952864304:2185];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:55.912245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:56.768974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 619 cpu_time_us: 619 } query_phases { duration_us: 1602 cpu_time_us: 1602 } query_phases { duration_us: 20030 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 12798 affected_shards: 1 } query_phases { duration_us: 1662 cpu_time_us: 1662 } query_phases { duration_us: 5851 cpu_time_us: 5851 } query_phases { duration_us: 2694 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 2961 } query_phases { duration_us: 1461 cpu_time_us: 1461 } query_phases { duration_us: 4752 cpu_time_us: 4752 } query_phases { duration_us: 5760 cpu_time_us: 4633 } query_phases { duration_us: 5359 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 3173 affected_shards: 2 } compilation { duration_us: 1257082 cpu_time_us: 1238613 } process_cpu_time_us: 18551 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732230958269,\"TaskId\":1,\"Host\":\"ghrun-uc25mmfz34\",\"ComputeTimeUs\":114}],\"CpuTimeUs\":563}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1732230958269,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":563,\"Max\":563,\"Min\":563}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1732230958269,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732230958269,\"TaskId\":2,\"Host\":\"ghrun-uc25mmfz34\",\"ComputeTimeUs\":94}],\"CpuTimeUs\":558}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"BaseTimeMs\":1732230958269,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":558,\"Max\":558,\"Min\":558}},\"CTE Name\":\"precompute_8_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":35,\"Subplan Name\":\"CTE precompute_8_1\",\"Plans\":[{\"PlanNodeId\":34,\"Plans\":[{\"PlanNodeId\":33,\"Plans\":[{\"PlanNodeId\":32,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1},{\"InternalOperatorId\":1},{\"InternalOperatorId\":1},{\"InternalOperatorId\":1}],\"Iterator\":\"Map\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"E-Rows\":\"1\",\"Predicate\":\"Nth\",\"Name\":\"Filter\",\"E-Size\":\"5\",\"E-Cost\":\"0\"}],\"Node Type\":\"ConstantExpr-Filter\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1732230958259,\"StartTimeMs\":1732230958257,\"TaskId\":2,\"Host\":\"ghrun-uc25mmfz34\",\"ComputeTimeUs\":9,\"OutputChannels\":[{\"ChannelId\":2,\"DstStageId\":3}]}],\"PeakMemoryUsageBytes\":65536,\"DurationUs\":2000,\"CpuTimeUs\":738}],\"UseLlvm\":\"undefined\",\"DurationUs\":{\"Count\":1,\"Sum\":2000,\"Max\":2000,\"Min\":2000},\"Output\":[{\"Pop\":{},\"Name\":\"34\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":104857 ... cf8-bbf4675a-dfb49fd3-c38dc1e\"))))\n(let $90 (DqPhyStage \'() (lambda \'() (ToStream (Just $88))) \'(\'(\'\"_logical_id\" \'5233) \'(\'\"_id\" \'\"d2ad832f-35e1d616-fe0aabb5-cfe51dcd\"))))\n(let $91 (DqCnValue (TDqOutput $89 \'0)))\n(let $92 (DqCnValue (TDqOutput $90 \'0)))\n(let $93 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $94 \'($87 $93))\n(let $95 (KqpPhysicalTx \'($89 $90) \'($91 $92) \'($47 $94) $3))\n(let $96 \'\"%kqp%tx_result_binding_7_0\")\n(let $97 (DictType $19 (TupleType $40 $52)))\n(let $98 %kqp%tx_result_binding_7_0)\n(let $99 (DqPhyStage \'() (lambda \'() (Iterator (FlatMap (Map $88 (lambda \'($178) (AsStruct \'(\'\"Key\" (Member $178 \'\"Key\")) \'(\'\"fk1\" (Member $178 \'\"fk1\")) \'(\'\"fk3\" (Member $178 \'\"fk3\"))))) (lambda \'($179) (block \'(\n (let $180 \'(\'\"Key\" (Member $179 \'\"Key\")))\n (let $181 \'(\'\"fk1\" (Member $179 \'\"fk1\")))\n (let $182 \'(\'\"fk3\" (Member $179 \'\"fk3\")))\n (return (IfPresent (Lookup $98 (AsStruct $180)) (lambda \'($183) (If (Nth $183 \'1) (Just (AsStruct $180 $181 \'(\'\"fk2\" (Member (Nth $183 \'0) \'\"fk2\")) $182)) (Nothing (OptionalType $27)))) (Just (AsStruct $180 $181 $43 $182))))\n)))))) \'(\'(\'\"_logical_id\" \'5855) \'(\'\"_id\" \'\"5a83efa-ec64099b-4a9d3bc0-fdc88564\"))))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator (Map (Filter (DictItems $98) (lambda \'($184) (Nth (Nth $184 \'1) \'1))) (lambda \'($185) (block \'(\n (let $186 (Nth (Nth $185 \'1) \'0))\n (return (AsStruct \'(\'\"Key\" (Member (Nth $185 \'0) \'\"Key\")) \'(\'\"fk1\" (Member $186 \'\"fk1\")) \'(\'\"fk2\" (Member $186 \'\"fk2\")) \'(\'\"fk3\" (Member $186 \'\"fk3\"))))\n)))))) \'(\'(\'\"_logical_id\" \'5869) \'(\'\"_id\" \'\"e9035bf2-f3f00c8-a3142fd2-5eb98444\"))))\n(let $101 (DqCnUnionAll (TDqOutput $99 \'0)))\n(let $102 (lambda \'($187) $187))\n(let $103 (DqPhyStage \'($101) $102 \'(\'(\'\"_logical_id\" \'5965) \'(\'\"_id\" \'\"29e46a57-975b4f0b-8099c4b-e9e6acd6\"))))\n(let $104 (DqCnUnionAll (TDqOutput $100 \'0)))\n(let $105 (DqPhyStage \'($104) $102 \'(\'(\'\"_logical_id\" \'5980) \'(\'\"_id\" 
\'\"50fe881c-647ec9da-39ea2615-c4e1b341\"))))\n(let $106 \'($99 $100 $103 $105))\n(let $107 (DqCnResult (TDqOutput $103 \'0) \'()))\n(let $108 (DqCnResult (TDqOutput $105 \'0) \'()))\n(let $109 (KqpTxResultBinding $97 \'\"7\" \'0))\n(let $110 (KqpPhysicalTx $106 \'($107 $108) \'($94 \'($96 $109)) $3))\n(let $111 \'\"%kqp%tx_result_binding_7_1\")\n(let $112 (DqPhyStage \'() (lambda \'() (block \'(\n (let $188 \'(\'\"Key\" \'\"Value\" \'\"fk1\" \'\"fk3\"))\n (return (KqpEffects (KqpUpsertRows $22 (Iterator %kqp%tx_result_binding_7_1) $188 \'(\'(\'\"Mode\" \'\"upsert\")))))\n))) \'(\'(\'\"_logical_id\" \'6495) \'(\'\"_id\" \'\"e7fbdf66-3e0248fc-507f2bad-a9ceebf\"))))\n(let $113 \'\"%kqp%tx_result_binding_8_1\")\n(let $114 (ListType $27))\n(let $115 (DqPhyStage \'() (lambda \'() (KqpEffects (KqpDeleteRows $64 (Iterator %kqp%tx_result_binding_8_1)))) \'(\'(\'\"_logical_id\" \'6509) \'(\'\"_id\" \'\"9bf40a1b-29b59a87-85cc35e5-292c9a0c\"))))\n(let $116 \'\"%kqp%tx_result_binding_8_0\")\n(let $117 (DqPhyStage \'() (lambda \'() (block \'(\n (let $189 \'(\'\"fk1\" \'\"fk2\" \'\"fk3\" \'\"Key\"))\n (return (KqpEffects (KqpUpsertRows $64 (Iterator %kqp%tx_result_binding_8_0) $189 \'())))\n))) \'(\'(\'\"_logical_id\" \'6523) \'(\'\"_id\" \'\"b13e1f35-60de7d59-6825d7f2-6e63cf92\"))))\n(let $118 \'($112 $115 $117))\n(let $119 (KqpTxResultBinding $11 \'\"7\" \'1))\n(let $120 (KqpTxResultBinding $114 \'\"8\" \'0))\n(let $121 (KqpTxResultBinding $114 \'\"8\" \'1))\n(let $122 \'(\'($111 $119) \'($116 $120) \'($113 $121)))\n(let $123 (KqpPhysicalTx $118 \'() $122 \'($36 \'(\'\"with_effects\"))))\n(let $124 \'($4 $17 $38 $48 $61 $76 $86 $95 $110 $123))\n(return (KqpPhysicalQuery $124 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 1336071 total_cpu_time_us: 1296676 2024-11-21T23:15:58.436083Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 19822, MsgBus: 12868 2024-11-21T23:16:00.530731Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875323348893733:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:00.534749Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001418/r3tmp/tmpcCkDUz/pdisk_1.dat 2024-11-21T23:16:00.679939Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:00.709527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:00.709624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:00.712726Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19822, node 2 2024-11-21T23:16:00.903097Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:00.903127Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:00.903136Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:00.903256Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12868 TClient is connected to server localhost:12868 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:01.654973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:01.662734Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:16:01.668291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:01.797459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:02.038736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:02.124173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:04.833215Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875340528764613:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:04.833377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:04.900841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:04.978352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:05.018957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:05.059917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:05.097232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:05.158089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:05.224079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875344823732410:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:05.224195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:05.224441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875344823732415:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:05.230512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:05.243383Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875344823732417:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:16:05.523605Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875323348893733:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:05.523687Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:06.527391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> TContinuousBackupTests::Basic [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] >> SystemView::SystemViewFailOps [GOOD] >> SystemView::TopPartitionsFields [GOOD] >> SystemView::TopPartitionsFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:10.081457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:10.081547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:10.081586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:10.081619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:10.081688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:10.081716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:10.081771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:10.085520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:10.161138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:10.161197Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:10.188282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:10.192263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:10.192454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:10.201505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:10.207186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:10.208007Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:10.208357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:10.239659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:10.240940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:10.241001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:10.241238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:10.241283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:10.241316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:10.241412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:10.255367Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:10.389700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:10.389934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:10.390100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:10.390294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:10.390353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:10.398264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:10.398444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:10.398643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:10.398725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:10.398758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:10.398807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:10.401770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:10.401830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:10.401863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:10.410899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:10.410965Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:10.411007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:10.411073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:10.414786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:10.416808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:10.417013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:10.417983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:10.418110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:10.418150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:10.418455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:10.418501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:10.418672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:10.418764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:10.420980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:10.421021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:10.421180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:10.421217Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:10.421547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:10.421592Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:10.421712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:10.421741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:10.421782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:10.421819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:10.421852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:10.421891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:10.421964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:10.421998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:10.422037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:10.423898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:10.423997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:10.424030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:10.424080Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:10.424129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:10.424227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
txId: 104 2024-11-21T23:16:11.171343Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2024-11-21T23:16:11.171360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2024-11-21T23:16:11.171412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T23:16:11.171698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:16:11.171747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:16:11.171769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T23:16:11.171792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2024-11-21T23:16:11.174074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T23:16:11.176133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2024-11-21T23:16:11.176258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T23:16:11.176454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T23:16:11.176531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T23:16:11.176572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2024-11-21T23:16:11.189169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 795 } } 2024-11-21T23:16:11.189253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T23:16:11.189419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 795 } } 2024-11-21T23:16:11.189525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 795 } } FAKE_COORDINATOR: Erasing txId 104 2024-11-21T23:16:11.191164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 
72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T23:16:11.191223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2024-11-21T23:16:11.191371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T23:16:11.191439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2024-11-21T23:16:11.191553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 304 RawX2: 4294969588 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2024-11-21T23:16:11.191621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:11.191659Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:11.191719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2024-11-21T23:16:11.191762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2024-11-21T23:16:11.202973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:11.203471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:11.203780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:11.203818Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T23:16:11.203899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2024-11-21T23:16:11.203932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T23:16:11.203975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2024-11-21T23:16:11.204082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:342:2317] message: TxId: 104 2024-11-21T23:16:11.204167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2024-11-21T23:16:11.209110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T23:16:11.209211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T23:16:11.209382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:16:11.209429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2024-11-21T23:16:11.209451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 
2024-11-21T23:16:11.209473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:16:11.209494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2024-11-21T23:16:11.209522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2024-11-21T23:16:11.209570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2024-11-21T23:16:11.210109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:16:11.210161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2024-11-21T23:16:11.210232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:16:11.210270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:16:11.210313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:16:11.214564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:16:11.214627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:736:2639] 2024-11-21T23:16:11.214720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2024-11-21T23:16:11.215422Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:16:11.215747Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 349us result status StatusPathDoesNotExist 2024-11-21T23:16:11.215911Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:16:11.216410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:16:11.216584Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 172us result status StatusPathDoesNotExist 2024-11-21T23:16:11.216746Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TStateStorageTest::ShouldSaveGetOldSmallState >> TStorageServiceTest::ShouldRegister >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged >> KqpMultishardIndex::DataColumnWrite+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 62204, MsgBus: 21001 2024-11-21T23:15:50.441746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875283552320130:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:50.442193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00141e/r3tmp/tmpohE233/pdisk_1.dat 2024-11-21T23:15:51.007097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:51.007200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:51.038132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:51.112591Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62204, node 1 2024-11-21T23:15:51.211485Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:51.211509Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:51.211515Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:51.211628Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21001 TClient is connected to server localhost:21001 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:51.884451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.906165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.100603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.281493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.357462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:54.778087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875300732190876:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.788664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.820515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.873906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.911125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.956064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.990637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.072991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:55.127090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875305027158670:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.127221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.127502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875305027158675:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:55.132464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:55.148073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875305027158677:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:15:55.430539Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875283552320130:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:55.441134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:56.361215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:58.281796Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g5jgh6cfdn2nsvfk3cgcz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAzOTY1MmUtYmVmZDE5NWItZWNkOWEzNjktMTgxNGNmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:15:58.302505Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjAzOTY1MmUtYmVmZDE5NWItZWNkOWEzNjktMTgxNGNmYmQ=, ActorId: [1:7439875309322127066:2521], ActorState: ExecuteState, TraceId: 01jd8g5jgh6cfdn2nsvfk3cgcz, Create QueryResponse for error on request, msg: 2024-11-21T23:15:59.083063Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g5k7h13p0rxt7tjpztqqq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAzOTY1MmUtYmVmZDE5NWItZWNkOWEzNjktMTgxNGNmYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:15:59.083266Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjAzOTY1MmUtYmVmZDE5NWItZWNkOWEzNjktMTgxNGNmYmQ=, ActorId: [1:7439875309322127066:2521], ActorState: ExecuteState, TraceId: 01jd8g5k7h13p0rxt7tjpztqqq, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4208, MsgBus: 12394 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00141e/r3tmp/tmp53DrDn/pdisk_1.dat 2024-11-21T23:16:00.116384Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:00.124734Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:00.142044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:00.142124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:00.145013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4208, node 2 2024-11-21T23:16:00.193123Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:00.193145Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:00.193151Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:00.193287Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12394 TClient is connected to server localhost:12394 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:00.720762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:00.732831Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:16:00.745773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:00.838670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:01.052024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:01.141801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:03.629266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875336844899975:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:03.629369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:03.672861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.723433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.784766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.873450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:03.935252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:04.014080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:04.118906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875341139867778:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:04.118973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:04.119126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875341139867783:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:04.123823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:04.150598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875341139867785:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:16:05.321838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:07.628043Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g5vbb17hx82mb76m70kmm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2MzZWJmZWMtNmFmNTg1YjEtZjg2MjE2ZDctNDRiZDI5OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:16:07.628246Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2MzZWJmZWMtNmFmNTg1YjEtZjg2MjE2ZDctNDRiZDI5OWE=, ActorId: [2:7439875345434836173:2518], ActorState: ExecuteState, TraceId: 01jd8g5vbb17hx82mb76m70kmm, Create QueryResponse for error on request, msg: 2024-11-21T23:16:08.532054Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g5waq5qbzb5jzb4k52zz8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2MzZWJmZWMtNmFmNTg1YjEtZjg2MjE2ZDctNDRiZDI5OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:16:08.532267Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2MzZWJmZWMtNmFmNTg1YjEtZjg2MjE2ZDctNDRiZDI5OWE=, ActorId: [2:7439875345434836173:2518], ActorState: ExecuteState, TraceId: 01jd8g5waq5qbzb5jzb4k52zz8, Create QueryResponse for error on request, msg: |85.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK >> DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::SystemViewFailOps [GOOD] Test command err: 2024-11-21T23:15:41.757230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875243196225408:2216];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:41.759341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001907/r3tmp/tmpwO0H6a/pdisk_1.dat 2024-11-21T23:15:42.325866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:42.325986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:42.329933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:42.346833Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc 
on GrpcPort 15130, node 1 2024-11-21T23:15:42.364023Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:42.364094Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:42.420185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:42.420211Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:42.420221Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:42.420362Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2024-11-21T23:15:43.054794Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637722 Duration# 0.171519s 2024-11-21T23:15:43.054843Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.171619s Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:43.122755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:43.168064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:46.755858Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875243196225408:2216];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:46.755926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:47.804205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875268966030016:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:47.804288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:47.804665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875268966030029:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:47.812400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:15:47.840934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875268966030031:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:15:48.368269Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jd8g58yk0s7x9cms1n6yn1hr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc0YTIwYjYtNmY1ODA5N2MtNjYxMTI2OTEtNzdiNzgzNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:48.536592Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jd8g59hr6x05nda2d405k2r5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE2NmI5NTgtNGE0ZWI5YTItZjk1ZTA0MGQtYzk5ZjY5ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:48.540776Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439875273260997484:2330], owner: [1:7439875273260997481:2328], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T23:15:48.617873Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439875273260997484:2330], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:15:48.618590Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439875273260997484:2330], row count: 1, finished: 1 2024-11-21T23:15:48.618645Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439875273260997484:2330], owner: [1:7439875273260997481:2328], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T23:15:48.627906Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230948534, txId: 281474976710662] shutting down test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001907/r3tmp/tmpktOvpH/pdisk_1.dat 2024-11-21T23:15:50.595829Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:50.611791Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:50.633534Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:50.633587Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:50.638004Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13094, node 6 2024-11-21T23:15:50.697387Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:50.697417Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:50.697425Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:50.697531Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27988 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:50.895816Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:50.907917Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:15:53.728501Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875293132535838:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.728605Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.729390Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875293132535851:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.739699Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:15:53.756054Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439875293132535853:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:15:54.002244Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8g5c0maj10rg96t8804n0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZWNlZWYzOGEtNTI4ODRhY2MtNzc1ZWIwY2UtMWRmMTQ3ZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:54.006657Z node 6 :SYSTEM_VIEWS INFO: Scan started, actor: [6:7439875297427503235:2311], owner: [6:7439875297427503232:2309], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:15:54.014695Z node 6 :SYSTEM_VIEWS INFO: Scan prepared, actor: [6:7439875297427503235:2311], schemeshard id: ... row count: 4, finished: 1 2024-11-21T23:16:03.728883Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439875338896175293:2340], owner: [7:7439875338896175289:2338], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:03.731954Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230963712, txId: 281474976710664] shutting down 2024-11-21T23:16:03.876392Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd8g5rh04vvq9qx6g50p727r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Zjg1NTRkMTEtZDdmZmI2ODYtMjNiMGE5ZTktNjMyM2YxZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:03.878324Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439875338896175325:2349], owner: [7:7439875338896175321:2347], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:03.882903Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439875338896175325:2349], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:03.883255Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439875338896175325:2349], row count: 2, finished: 1 2024-11-21T23:16:03.883290Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439875338896175325:2349], owner: [7:7439875338896175321:2347], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:03.885797Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230963874, txId: 281474976710666] shutting down 2024-11-21T23:16:04.030677Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jd8g5rnq7qctdd08ra757099, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2ZlZDRhMzUtY2EyMTUwMDItOGRkNTg0NTktM2I0Njg3N2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:16:04.037777Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439875343191142653:2358], owner: [7:7439875343191142650:2356], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:04.041398Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439875343191142653:2358], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:04.041764Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439875343191142653:2358], row count: 3, finished: 1 2024-11-21T23:16:04.041825Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439875343191142653:2358], owner: [7:7439875343191142650:2356], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:04.043749Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230964027, txId: 281474976710668] shutting down 2024-11-21T23:16:04.180352Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jd8g5rty1am6aekz02mamn6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZmZkYzljMmItNDRhNWI1OC03N2UwOTk1ZS02ZDM1NmNhYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:04.183544Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439875343191142686:2367], owner: [7:7439875343191142682:2365], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:04.184851Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439875343191142686:2367], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:04.185147Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439875343191142686:2367], row count: 2, finished: 1 2024-11-21T23:16:04.185195Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439875343191142686:2367], owner: [7:7439875343191142682:2365], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:04.187168Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230964176, txId: 281474976710670] shutting down 2024-11-21T23:16:04.315988Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jd8g5rz9818bs887zxppsenf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTRkMGI2YmMtMTVmMjgzYzMtMjQ0OGI0YTMtMjZmMjVkNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:16:04.320300Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439875343191142719:2377], owner: [7:7439875343191142716:2375], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:04.323924Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439875343191142719:2377], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:04.324314Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439875343191142719:2377], row count: 3, finished: 1 2024-11-21T23:16:04.324352Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439875343191142719:2377], owner: [7:7439875343191142716:2375], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2024-11-21T23:16:04.332439Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230964315, txId: 281474976710672] shutting down 2024-11-21T23:16:05.967798Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439875347689906356:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:05.967861Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001907/r3tmp/tmpyz4iOE/pdisk_1.dat 2024-11-21T23:16:06.108092Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:06.136156Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:06.136242Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:06.138733Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1639, node 8 2024-11-21T23:16:06.220915Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:06.220940Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:06.220948Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:06.221065Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:16:06.523664Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2024-11-21T23:16:09.807915Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439875364869776402:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:09.808028Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
: Error: Execution, code: 1060
:2:28: Error: Executing DROP TABLE
: Error: Incorrect scheme found while performing Kikimr operation., code: 2003
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 2024-11-21T23:16:09.892203Z node 8 :TX_PROXY ERROR: [ReadTable [8:7439875364869776423:2307] TxId# 281474976715663] Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats] 2024-11-21T23:16:09.892326Z node 8 :TX_PROXY ERROR: [ReadTable [8:7439875364869776423:2307] TxId# 281474976715663] RESPONSE Status# ResolveError shard: 0 table: /Root/.sys/partition_stats
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats], code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Cannot read system table '/Root/.sys/partition_stats', tableId# [72057594046644480:1:0:partition_stats]
: Error: Bulk upsert to table '/Root/.sys/partition_stats'is not supported. Table is a system view
: Error: Check failed: path: '/Root/.sys', error: path part '.sys' is reserved by the system
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200
: Error: Check failed: path: '/Root/.sys/partition_stats', error: path hasn't been resolved, nearest resolved path: '/Root' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), code: 200200 >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15767, MsgBus: 15568 2024-11-21T23:15:37.768989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875227583501425:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:37.769031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001452/r3tmp/tmpMItVWE/pdisk_1.dat 2024-11-21T23:15:38.521741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:38.521819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:38.536844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:38.546518Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15767, node 1 2024-11-21T23:15:38.771236Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:38.771265Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:38.771274Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:38.771392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15568 TClient is connected to server localhost:15568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:40.105554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:40.150233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.333574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.668947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.819213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:42.770740Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875227583501425:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:42.770826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:44.317940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875257648273986:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:44.343202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:44.816281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:44.878415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:44.961046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.012943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.074985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.166635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.262753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875261943241795:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:45.262838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:45.263207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875261943241800:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:45.267339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:45.293206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875261943241802:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:15:46.892422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 63738, MsgBus: 21080 2024-11-21T23:15:50.126336Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875280692074370:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:50.127368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001452/r3tmp/tmpsqvqu9/pdisk_1.dat 2024-11-21T23:15:50.295201Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:50.296495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:50.296574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:50.299855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63738, node 2 2024-11-21T23:15:50.358650Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:50.358688Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:50.358696Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:50.358824Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21080 TClient is connected to server localhost:21080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:50.915855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:50.928458Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:15:50.944949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:51.074381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.277891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.356143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:53.785372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875293576977942:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.785513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.830809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.882780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:53.964033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.024517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.076992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.146865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:54.276779Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875297871945741:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.276879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.277099Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875297871945746:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:54.281363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:54.299897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875297871945748:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:15:55.125716Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875280692074370:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:55.125797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:55.538040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:05.232152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:16:05.232195Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:05.715689Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439875345116590342:2814], TxId: 281474976715731, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NjI3ZDM0OTMtZjFlODY1ODQtZWZlOTdlYzEtYWI5ODhiM2M=. TraceId : 01jd8g5sz889qba88pv9xewyz9. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T23:16:05.718542Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439875345116590343:2815], TxId: 281474976715731, task: 2. Ctx: { TraceId : 01jd8g5sz889qba88pv9xewyz9. SessionId : ydb://session/3?node_id=2&id=NjI3ZDM0OTMtZjFlODY1ODQtZWZlOTdlYzEtYWI5ODhiM2M=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439875345116590339:2460], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T23:16:05.724538Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjI3ZDM0OTMtZjFlODY1ODQtZWZlOTdlYzEtYWI5ODhiM2M=, ActorId: [2:7439875302166913357:2460], ActorState: ExecuteState, TraceId: 01jd8g5sz889qba88pv9xewyz9, Create QueryResponse for error on request, msg: 2024-11-21T23:16:05.898782Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715734 at tablet 72075186224037933 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037933) | 2024-11-21T23:16:05.899608Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715734 at tablet 72075186224037932 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037932) | 2024-11-21T23:16:05.992668Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715734 at tablet 72075186224037930 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037930) | 2024-11-21T23:16:05.998700Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715734 at tablet 72075186224037931 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037931) | 2024-11-21T23:16:06.035647Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2024-11-21T23:16:06.035687Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2024-11-21T23:16:06.102612Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2024-11-21T23:16:06.135091Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2024-11-21T23:16:06.166583Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2024-11-21T23:16:06.166618Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2024-11-21T23:16:06.190580Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2024-11-21T23:16:06.190623Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2024-11-21T23:16:11.004992Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2024-11-21T23:16:11.005070Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2024-11-21T23:16:11.005097Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 11012, MsgBus: 15630 2024-11-21T23:15:38.498791Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875229040033510:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:38.500463Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001432/r3tmp/tmph9KFNr/pdisk_1.dat 2024-11-21T23:15:39.275441Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:39.325661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:39.325740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:39.333610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11012, node 1 2024-11-21T23:15:39.591131Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:39.591157Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:39.591163Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:39.591262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15630 TClient is connected to server localhost:15630 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:40.441331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.493885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.505156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:15:40.717708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:40.927789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:41.038523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:43.474622Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875229040033510:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:43.474685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:44.099050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875250514871559:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:44.116217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:44.165255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:44.239114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:15:44.306799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:44.377778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:44.432294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:15:44.536215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:15:44.646427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875254809839358:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:44.646521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:44.646781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875254809839363:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:44.652031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:15:44.670981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875254809839365:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:15:46.700287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:53.873168Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g5dwa3zet9cjexeekq0dw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgwMDhlNDEtODJkYzUyNDYtM2MxMDA5OTctMWI0MzllMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:15:53.886413Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgwMDhlNDEtODJkYzUyNDYtM2MxMDA5OTctMWI0MzllMTg=, ActorId: [1:7439875267694742374:2527], ActorState: ExecuteState, TraceId: 01jd8g5dwa3zet9cjexeekq0dw, Create QueryResponse for error on request, msg: 2024-11-21T23:15:54.258574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:54.258604Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:57.182825Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710715 at tablet 72075186224037930 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037930) | 2024-11-21T23:15:57.184074Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710715 at tablet 72075186224037931 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037931) | 2024-11-21T23:15:57.238597Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710715 at tablet 72075186224037928 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037928) | 2024-11-21T23:15:57.244780Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710715 at tablet 72075186224037933 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037933) | 2024-11-21T23:15:57.245529Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710715 at tablet 72075186224037932 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037932) | 2024-11-21T23:15:57.246094Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710715 at tablet 72075186224037934 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037934) | 2024-11-21T23:15:57.295313Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2024-11-21T23:15:57.295350Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2024-11-21T23:15:57.327785Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2024-11-21T23:15:57.327827Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2024-11-21T23:15:57.339216Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2024-11-21T23:15:57.344730Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2024-11-21T23:15:57.344763Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2024-11-21T23:15:57.344782Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2024-11-21T23:15:57.348686Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2024-11-21T23:15:57.356111Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2024-11-21T23:15:57.356151Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2024-11-21T23:16:02.209199Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2024-11-21T23:16:07.687029Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2024-11-21T23:16:07.687066Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2024-11-21T23:16:07.692371Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2024-11-21T23:16:07.692405Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2024-11-21T23:16:07.697228Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2024-11-21T23:16:07.810165Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2024-11-21T23:16:07.810200Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2024-11-21T23:16:09.665459Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g5x0z7arwcbvzgxtqe4db, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgwMDhlNDEtODJkYzUyNDYtM2MxMDA5OTctMWI0MzllMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:16:09.665726Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgwMDhlNDEtODJkYzUyNDYtM2MxMDA5OTctMWI0MzllMTg=, ActorId: [1:7439875267694742374:2527], ActorState: ExecuteState, TraceId: 01jd8g5x0z7arwcbvzgxtqe4db, Create QueryResponse for error on request, msg: 2024-11-21T23:16:10.812224Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g5ydqcz3xhxa416k0ch6v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgwMDhlNDEtODJkYzUyNDYtM2MxMDA5OTctMWI0MzllMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:16:10.812475Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgwMDhlNDEtODJkYzUyNDYtM2MxMDA5OTctMWI0MzllMTg=, ActorId: [1:7439875267694742374:2527], ActorState: ExecuteState, TraceId: 01jd8g5ydqcz3xhxa416k0ch6v, Create QueryResponse for error on request, msg: 2024-11-21T23:16:12.211726Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01jd8g60r859z07wvnpcccts0v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgwMDhlNDEtODJkYzUyNDYtM2MxMDA5OTctMWI0MzllMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:16:12.211910Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgwMDhlNDEtODJkYzUyNDYtM2MxMDA5OTctMWI0MzllMTg=, ActorId: [1:7439875267694742374:2527], ActorState: ExecuteState, TraceId: 01jd8g60r859z07wvnpcccts0v, Create QueryResponse for error on request, msg: 2024-11-21T23:16:12.283081Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found >> DbCounters::TabletsSimple [GOOD] >> LabeledDbCounters::OneTablet >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne >> TStorageServiceTest::ShouldRegister [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration >> TCdcStreamTests::VirtualTimestamps >> SystemView::TopPartitionsTables [GOOD] >> SystemView::TopPartitionsRanges >> SystemView::DescribeSystemFolder [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState >> TCdcStreamTests::Basic >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> SystemView::DescribeAccessDenied [GOOD] >> SystemView::CollectScriptingQueries >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks >> RetryPolicy::RetryWithBatching [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation >> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementBigState >> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::DescribeSystemFolder [GOOD] Test command err: 2024-11-21T23:15:45.943013Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875258410167106:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:45.943884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000d7d/r3tmp/tmpBE3T4U/pdisk_1.dat 2024-11-21T23:15:46.676105Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:46.677106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:46.677162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2024-11-21T23:15:46.680025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2898, node 1 2024-11-21T23:15:46.839157Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:46.839183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:46.839212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:46.839296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:47.270716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:47.283292Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:15:49.703483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875275590036784:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.703598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.704051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875275590036796:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.708319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:15:49.733362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875275590036798:2303], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:15:50.327086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jd8g58ggb7fx9wvchctamb6w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWEwOTkxZC1hNmQ0YmEzYy01YjRmMDg0Ni05MDA3MTRjNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:50.370802Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439875279885004183:2312], owner: [1:7439875279885004179:2310], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T23:15:50.374956Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439875279885004183:2312], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:15:50.393106Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439875279885004183:2312], row count: 1, finished: 1 2024-11-21T23:15:50.393191Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439875279885004183:2312], owner: [1:7439875279885004179:2310], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T23:15:50.398582Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230950325, txId: 281474976710660] shutting down 2024-11-21T23:15:50.932163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875258410167106:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:50.932259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:51.583015Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jd8g5cfveqcjzp51rg5t8wn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzY5ZDQwZjEtM2JjMzZmZGUtMWNiNjE1NzktMzExNzQxMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:51.585305Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439875284179971524:2326], owner: [1:7439875284179971520:2324], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T23:15:51.585940Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439875284179971524:2326], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:15:51.586203Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439875284179971524:2326], row count: 1, finished: 1 2024-11-21T23:15:51.586228Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439875284179971524:2326], owner: [1:7439875284179971520:2324], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T23:15:51.587679Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230951582, txId: 281474976710662] shutting down 2024-11-21T23:15:52.722361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8g5dmq76mxd6qebkbjzab2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdiYzFmM2YtMWY4NjlmMjctNDExMmQ3YTYtMjRhYTk1NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:15:52.724457Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439875288474938864:2341], owner: [1:7439875288474938861:2339], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T23:15:52.725261Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439875288474938864:2341], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:15:52.725470Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439875288474938864:2341], row count: 1, finished: 1 2024-11-21T23:15:52.725502Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439875288474938864:2341], owner: [1:7439875288474938861:2339], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2024-11-21T23:15:52.727641Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230952721, txId: 281474976710664] shutting down 2024-11-21T23:15:54.430478Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875298450702102:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:54.430525Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000d7d/r3tmp/tmpNApNps/pdisk_1.dat 2024-11-21T23:15:54.693978Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16669, node 2 2024-11-21T23:15:54.807132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:54.807206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:54.813828Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:54.818861Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:54.818883Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:54.818891Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:54.818983Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:15:55.122638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:57.356315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875311335604719:2298], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:57.356424Z ... , will use file: (empty maybe) 2024-11-21T23:16:02.559477Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:02.559488Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:02.559598Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29583 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:02.962007Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:03.012491Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:03.112248Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7439875335216842305:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:03.112293Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:16:03.172066Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439875338639893331:2074];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:03.243069Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:16:03.381827Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:16:03.416704Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:03.416816Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:03.423278Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2024-11-21T23:16:03.445653Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:03.445749Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:03.445937Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:03.459411Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2024-11-21T23:16:03.460925Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:03.588451Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:03.637667Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439875337801014598:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:03.639359Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:16:03.683167Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:16:03.776442Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:03.776539Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:03.808445Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:03.811595Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:03.824836Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2024-11-21T23:16:03.826791Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:03.851297Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2024-11-21T23:16:03.855813Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:07.226547Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7439875335028766469:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:07.226613Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:08.114116Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7439875335216842305:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:08.114188Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:08.158864Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7439875338639893331:2074];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:08.158934Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:08.639915Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7439875337801014598:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:08.639985Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:10.436548Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:10.688777Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439875369388506246:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:10.688863Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:10.691588Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439875369388506258:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:10.697956Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715663:3, at schemeshard: 72057594046644480 2024-11-21T23:16:10.740679Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439875369388506260:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715663 completed, doublechecking } 2024-11-21T23:16:11.056351Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8g5z9y67n8vtan003syjv4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDgxOTRkMjQtM2UwNTg5MjMtMTViYTljMTMtMWU2OGQxYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:11.189557Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:11.630033Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd8g60245x8ndebcyyj42vgq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDgxOTRkMjQtM2UwNTg5MjMtMTViYTljMTMtMWU2OGQxYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:11.787293Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2024-11-21T23:16:12.500284Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd8g60y37xygb0vh9e0bcy9f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDgxOTRkMjQtM2UwNTg5MjMtMTViYTljMTMtMWU2OGQxYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Path not found 2024-11-21T23:16:12.703091Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T23:16:12.719249Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:12.719747Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T23:16:12.721159Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:12.721299Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2024-11-21T23:16:12.721664Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:12.722814Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T23:16:12.723270Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:12.762952Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875338639893538:2104], Type=268959746 >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> SystemView::QueryStatsFields [GOOD] >> SystemView::QueryStatsAllTables >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> SystemView::PartitionStatsTtlFields >> TSchemeShardViewTest::AsyncCreateDifferentViews >> TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod >> Viewer::PDiskMerging >> Viewer::JsonStorageListingV2 >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> Viewer::PDiskMerging [GOOD] >> Viewer::LevenshteinDistance [GOOD] >> Viewer::QueryExecuteScript >> DataShardReadIterator::ShouldFailWrongSchema [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange >> Viewer::SelectStringWithBase64Encoding >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:18.649309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:18.649402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:18.649434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:18.649472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:18.649511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:18.649540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:18.649583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:18.649848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:18.726445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:18.726508Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:18.741635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:18.754482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:18.754735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:18.763645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:18.764317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:18.765009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:18.765300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:18.775294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:18.776688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:18.776759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:18.777018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:18.777068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:18.777108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:18.777197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:18.784015Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:18.920109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:18.920374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:18.920604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:18.920835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:18.920908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:18.926253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:18.926421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:18.926620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:18.926699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:18.926751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:18.926786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:18.929373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:18.929444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:18.929481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:18.933604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:18.933659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:18.933709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:18.933758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:18.937908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:18.940906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 
1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:18.941111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:18.942125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:18.942256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:18.942308Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:18.942640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:18.942690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:18.942853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:18.942932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:18.946808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:18.946864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:18.946993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:18.947033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:18.947333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:18.947368Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:18.947437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:18.947483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:18.947521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:18.947560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:18.947617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:18.947655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:18.947712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:18.947762Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:18.947797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:18.949346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:18.949454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:18.949489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:18.949527Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:18.949556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:18.949649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... t reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:16:19.058198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T23:16:19.058221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2024-11-21T23:16:19.058257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T23:16:19.059116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:16:19.059157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:19.059185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:16:19.059217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2024-11-21T23:16:19.060564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:19.060635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:19.060668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:16:19.060712Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2024-11-21T23:16:19.060749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:16:19.061330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:19.061431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:19.061464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:16:19.061488Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T23:16:19.061512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:16:19.061576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T23:16:19.063651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:16:19.063745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2024-11-21T23:16:19.064034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T23:16:19.064092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T23:16:19.064211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:16:19.064232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T23:16:19.064276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:16:19.064304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:16:19.064916Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T23:16:19.065037Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:16:19.065082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:16:19.065118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:330:2322] 2024-11-21T23:16:19.065291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:16:19.065334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:16:19.065356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:330:2322] 2024-11-21T23:16:19.065454Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:16:19.065485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:330:2322] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T23:16:19.065944Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:19.066159Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 232us result status StatusSuccess 2024-11-21T23:16:19.066762Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } } Children { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:19.067275Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:19.067462Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 173us result status StatusSuccess 2024-11-21T23:16:19.067734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 
ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:19.068301Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:19.068450Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 158us result status StatusSuccess 2024-11-21T23:16:19.068672Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2024-11-21T23:09:33.860159Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.860186Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.860238Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2024-11-21T23:09:33.860674Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2024-11-21T23:09:33.860732Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.860762Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.862190Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009185s 2024-11-21T23:09:33.862675Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:09:33.862726Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.862744Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.862786Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008727s 2024-11-21T23:09:33.863442Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2024-11-21T23:09:33.863472Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.863495Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2024-11-21T23:09:33.863542Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009305s 2024-11-21T23:09:33.889888Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1732230573889850 2024-11-21T23:09:34.511722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439873666445744692:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:34.511957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:34.575482Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439873666342371871:2266];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:34.816609Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:09:34.822975Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0029ed/r3tmp/tmpnVzyyD/pdisk_1.dat 2024-11-21T23:09:34.925911Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:09:35.343426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:35.343560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:35.348060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:09:35.348134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:09:35.474547Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:09:35.477134Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:09:35.483050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:09:35.484694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2024-11-21T23:09:35.529043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 17069, node 1 2024-11-21T23:09:35.791055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0029ed/r3tmp/yandexo06oCf.tmp 2024-11-21T23:09:35.791091Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0029ed/r3tmp/yandexo06oCf.tmp 2024-11-21T23:09:35.791218Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0029ed/r3tmp/yandexo06oCf.tmp 2024-11-21T23:09:35.791317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:09:35.901996Z INFO: TTestServer started on Port 29076 GrpcPort 17069 TClient is connected to server localhost:29076 PQClient connected to localhost:17069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:09:36.492910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:09:39.517717Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439873666445744692:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:39.517801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:39.548669Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439873666342371871:2266];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:09:39.548728Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:09:40.091351Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873692112175716:2283], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.091465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439873692112175743:2286], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.091656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:09:40.112487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:09:40.224251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439873692112175748:2287], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:09:40.674912Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.131347s 2024-11-21T23:09:40.674985Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.131463s 2024-11-21T23:09:40.710731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:09:40.714973Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439873692215549527:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:40.716192Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTNjMTMyMDktY2Y1OGM5YTMtYjlkYTcxMjItMTBhZjU5NTE=, ActorId: [1:7439873692215549453:2303], ActorState: ExecuteState, TraceId: 01jd8ft1xd6v67sqc7mr5b9n2p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:40.716642Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439873692112175783:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:09:40.718284Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODkwZjAyMTMtOGNlMWE2MTgtZDJiNzVlNzQtZDU1Y2E1NTc=, ActorId: [2:7439873692112175708:2281], ActorState: ExecuteState, TraceId: 01jd8ft1vfeddn4ev4xqdth0sz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:09:40.719133Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:09:40.718271Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } ... DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1454 count 9 nextOffset 9 batches 1 2024-11-21T23:16:14.680366Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0test-message-group-id' seqNo 10 partNo 0 2024-11-21T23:16:14.680394Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0test-message-group-id' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 1612 count 10 nextOffset 10 batches 1 2024-11-21T23:16:14.681373Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,10 HeadOffset 0 endOffset 0 curOffset 10 d0000000000_00000000000000000000_00000_0000000010_00000| size 1208 WTime 1732230974680 2024-11-21T23:16:14.691122Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: 
false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 9 queued_in_partition_duration_ms: 26 } 2024-11-21T23:16:14.781658Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 1 2024-11-21T23:16:14.681712Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:16:14.688768Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 2024-11-21T23:16:14.781745Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 2 2024-11-21T23:16:14.688853Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:16:14.781773Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 3 2024-11-21T23:16:14.688935Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T23:16:14.781798Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 4 2024-11-21T23:16:14.688971Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:16:14.781825Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 5 2024-11-21T23:16:14.689011Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T23:16:14.781861Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 6 2024-11-21T23:16:14.689032Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:16:14.781905Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 7 2024-11-21T23:16:14.689065Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T23:16:14.781936Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 8 2024-11-21T23:16:14.689091Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T23:16:14.781971Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 9 2024-11-21T23:16:14.689125Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2024-11-21T23:16:14.689145Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:16:14.689175Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2024-11-21T23:16:14.689195Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:16:14.689227Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2024-11-21T23:16:14.689246Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:16:14.689280Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2024-11-21T23:16:14.689300Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:16:14.689334Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2024-11-21T23:16:14.689357Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:16:14.689390Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2024-11-21T23:16:14.689408Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T23:16:14.689439Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2024-11-21T23:16:14.689654Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:16:14.689696Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T23:16:14.689875Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T23:16:14.690012Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2024-11-21T23:16:14.782471Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: acknoledged message 10 2024-11-21T23:16:14.690011Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2024-11-21T23:16:14.690053Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T23:16:14.690459Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T23:16:14.690488Z node 17 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T23:16:14.690580Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732230974653 queuesize 0 startOffset 0 2024-11-21T23:16:14.882090Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: close. 
Timeout = 0 ms 2024-11-21T23:16:14.882142Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session will now close 2024-11-21T23:16:14.882191Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: aborting 2024-11-21T23:16:14.882702Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:16:14.883430Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0 grpc read done: success: 0 data: 2024-11-21T23:16:14.883493Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0 grpc read failed 2024-11-21T23:16:14.883532Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0 grpc closed 2024-11-21T23:16:14.883564Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0 is DEAD 2024-11-21T23:16:14.884763Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:16:14.884910Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:16:14.884964Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7439875383555712039:2605] destroyed 2024-11-21T23:16:14.885018Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2024-11-21T23:16:14.893642Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|2a7d18a3-1ccfe85a-fe24c97d-6e6288d4_0] Write session: destroy >> DataShardReadIterator::ShouldReadNonExistingKey [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> Viewer::Cluster10000Tablets >> TSchemeShardViewTest::AsyncDropSameView >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> Viewer::JsonAutocompleteEmpty >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> TSchemeShardViewTest::AsyncCreateSameView >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> Viewer::JsonAutocompleteStartOfDatabaseName >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends >> TSchemeShardViewTest::EmptyQueryText >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> TSchemeShardViewTest::DropView >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> Viewer::JsonAutocompleteSimilarDatabaseName >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TCdcStreamTests::CreateStream >> SystemView::CollectScriptingQueries [GOOD] >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:21.263517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:21.263624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:21.263666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:21.263718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2024-11-21T23:16:21.263769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:21.263798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:21.263858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:21.264195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:21.342723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:21.342786Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:21.357442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:21.361426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:21.361616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:21.371910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:21.372636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:21.373298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:21.373642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:21.378377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:21.379729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:21.379790Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:21.380035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:21.380088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:21.380130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:21.380224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.387134Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:21.528606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:21.528868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.529086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:21.529323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:21.529400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.532568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:21.532711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:21.532916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.532984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:21.533039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:21.533075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:21.540564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.540649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:21.540690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:21.543341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.543405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.543470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:21.543521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:21.547761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:21.549933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:21.550123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:21.551153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:21.551303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:21.551358Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:21.551664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:21.551717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:21.551919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:21.552031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:21.554223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:21.554285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:21.554474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:21.554517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:21.554893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.554944Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:21.555052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:21.555096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:21.555139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:21.555179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:21.555235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:21.555275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:21.555356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:21.555399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:21.555481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:21.557300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:21.557411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:21.557462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:21.557519Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:21.557563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:21.557664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... leModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2024-11-21T23:16:21.624835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T23:16:21.625552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2024-11-21T23:16:21.625654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T23:16:21.626285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:16:21.626453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T23:16:21.626789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:21.626885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:21.626970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2024-11-21T23:16:21.627104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T23:16:21.627279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:21.627342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:16:21.629346Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:21.629410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:21.629540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:16:21.629689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:21.629734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T23:16:21.629777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:16:21.630120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:16:21.630166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2024-11-21T23:16:21.630276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:16:21.630319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:16:21.630363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T23:16:21.630420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:16:21.630462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:16:21.630511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:16:21.630590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:16:21.630636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T23:16:21.630673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T23:16:21.630705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T23:16:21.631325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:21.631439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:21.631517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:16:21.631595Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:16:21.631632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2024-11-21T23:16:21.632453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:21.632533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:21.632563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:16:21.632606Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T23:16:21.632654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:16:21.632746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T23:16:21.640925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:16:21.641018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:16:21.641107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:21.645209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:16:21.647110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:16:21.647241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2024-11-21T23:16:21.647567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:16:21.647618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2024-11-21T23:16:21.647714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T23:16:21.647734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T23:16:21.648241Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:16:21.648337Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:16:21.648374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:16:21.648410Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:326:2318] 2024-11-21T23:16:21.648465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:16:21.648485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:326:2318] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2024-11-21T23:16:21.649081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:21.649282Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 230us result status StatusPathDoesNotExist 2024-11-21T23:16:21.649467Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::FuzzySearcherPriority [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:21.850211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:21.850319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:21.850365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:21.850455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:21.850504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:21.850535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:21.850593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:21.850917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:21.935680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:21.935750Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:21.964618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:21.968789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:21.968992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:21.988909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:21.989641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:21.990288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:21.990643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:21.995571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:21.996959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:21.997029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:21.997306Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:21.997365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:21.997409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:21.997500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.004655Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:22.152931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:22.153139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.153392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:22.153641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:22.153731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.156555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:22.156711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:22.156903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.156975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:22.157028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:22.157065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:22.158996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.159066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:22.159105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:22.161165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.161215Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.161262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 
72057594046678944 2024-11-21T23:16:22.161310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:22.165234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:22.169693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:22.169942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:22.170998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:22.171145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:22.171190Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:22.171495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:22.171550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:22.171753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:22.171849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:22.179499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:22.179581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:22.179738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:22.179777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:22.180085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.180128Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:22.180235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:22.180268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:22.180304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is 
published: false 2024-11-21T23:16:22.180336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:22.180379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:22.180408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:22.180473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:22.180521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:22.180565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:22.182214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:22.182327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:22.182366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:22.182445Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:22.182492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:22.182622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
Propose Complete, txId: 103, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2024-11-21T23:16:22.225760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T23:16:22.226550Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:16:22.228352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T23:16:22.228532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T23:16:22.228983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:22.229124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:22.229187Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T23:16:22.229322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T23:16:22.229507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:22.229576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:16:22.233353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:22.233422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:22.233603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:16:22.233707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:22.233762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:16:22.233817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T23:16:22.234150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.234219Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:16:22.234423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:16:22.234463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:16:22.234527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:16:22.234573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:16:22.234626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:16:22.234659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:16:22.234741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:16:22.234785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T23:16:22.234821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T23:16:22.234872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T23:16:22.235686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:22.235796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:22.235833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:16:22.235898Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T23:16:22.235939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:22.236755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:22.236830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:22.236856Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:16:22.236884Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T23:16:22.236913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:16:22.236980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:16:22.240128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:16:22.241212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2024-11-21T23:16:22.241508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2024-11-21T23:16:22.241551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2024-11-21T23:16:22.241667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:16:22.241700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2024-11-21T23:16:22.241762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2024-11-21T23:16:22.241801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2024-11-21T23:16:22.242313Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2024-11-21T23:16:22.242537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:16:22.242585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:16:22.242621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:302:2294] 2024-11-21T23:16:22.242809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:16:22.242832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:302:2294] 2024-11-21T23:16:22.242939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2024-11-21T23:16:22.243012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:16:22.243033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:302:2294] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2024-11-21T23:16:22.243560Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:22.243738Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 222us result status StatusSuccess 2024-11-21T23:16:22.244068Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Viewer::TabletMerging >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TSchemeShardViewTest::DropView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] Test command err: 2024-11-21T23:14:43.168262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:43.168325Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:43.264695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:44.938876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2024-11-21T23:14:45.155809Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:45.156406Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:45.157301Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6893284812950498181 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:45.210846Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:45.211487Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:45.211727Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4588116405971477891 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:45.214903Z node 4 :BS_LOCALRECOVERY CRIT: VDISK[80000002:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:14:45.287508Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:45.299389Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:45.299672Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2808324361120975804 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:45.302320Z node 3 :BS_LOCALRECOVERY CRIT: VDISK[80000001:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 
TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:14:45.385874Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:45.386373Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:45.386583Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpN8MwZQ/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runne ... Switch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:16:15.413482Z node 155 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 
GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:16:15.451221Z node 159 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:16:15.451773Z node 159 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:16:15.451953Z node 159 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9575508661407705389 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:16:15.496830Z node 157 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:16:15.497549Z node 157 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2024-11-21T23:16:15.497709Z node 157 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15644929611106892988 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:16:15.548863Z node 158 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:16:15.549424Z node 158 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:16:15.549621Z node 158 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2243763353729992492 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:16:15.606191Z node 160 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:16:15.606807Z node 160 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:16:15.607002Z node 160 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/002379/r3tmp/tmpcVr50l/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1992478929166239922 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:16:15.878805Z node 154 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:15.878915Z node 154 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:16.026859Z node 154 :STATISTICS WARN: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2024-11-21T23:16:19.300028Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:19.300130Z node 163 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:19.365448Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TJaegerTracingConfiguratorTests::DefaultConfig >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:22.649565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:22.649663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2024-11-21T23:16:22.649706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:22.649773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:22.649832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:22.649860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:22.649921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:22.650259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:22.742516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:22.742581Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:22.760366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:22.764378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:22.764583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:22.775397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:22.776154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:22.776812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:22.777134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:22.781793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:22.783198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:22.783278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:22.783557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:22.783615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:22.783664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:22.783785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.791011Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:22.916302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:22.916550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.916769Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:22.917021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:22.917093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.925723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:22.925881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:22.926080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.926164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:22.926213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:22.926267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:22.932386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.932461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:22.932503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:22.934632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.934685Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.934736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:22.934784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:22.943333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:22.952244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:22.952495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:22.953570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:22.953725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:22.953784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:22.954110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:22.954177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:22.954373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:22.954516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:22.959765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:22.959824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:22.960032Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:22.960088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:22.960474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.960526Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:22.960640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:22.960680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:22.960755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:22.960817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:22.960877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:22.960913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:22.960991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:22.961031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:22.961073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:22.972707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:22.972864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2024-11-21T23:16:22.972905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:22.972976Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:22.973035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:22.973223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2024-11-21T23:16:22.976732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2024-11-21T23:16:22.977273Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2024-11-21T23:16:22.977852Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Bootstrap 2024-11-21T23:16:22.998726Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] Become StateWork (SchemeCache [1:271:2263]) 2024-11-21T23:16:23.001410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:23.001647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2024-11-21T23:16:23.023976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2024-11-21T23:16:23.024211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:16:23.024300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T23:16:23.024372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:23.026039Z node 1 :TX_PROXY DEBUG: actor# [1:266:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2024-11-21T23:16:23.034734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T23:16:23.034944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2024-11-21T23:16:23.035420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.035490Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2024-11-21T23:16:23.035555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T23:16:23.035692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:23.036259Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2024-11-21T23:16:23.042431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T23:16:23.042601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T23:16:23.043105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:23.043248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:23.043319Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2024-11-21T23:16:23.043499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2024-11-21T23:16:23.043677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:23.043741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:16:23.046449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:23.046506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:23.046726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:16:23.046855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:23.046894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:16:23.046957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T23:16:23.047326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.047388Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:16:23.047508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:16:23.047541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 
ready parts: 1/1 2024-11-21T23:16:23.047586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:16:23.047639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:16:23.047677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:16:23.047707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:16:23.047794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:16:23.047834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2024-11-21T23:16:23.047862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2024-11-21T23:16:23.047886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2024-11-21T23:16:23.048734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:23.048849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:23.048884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:16:23.048920Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2024-11-21T23:16:23.048961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:23.049659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:23.049726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:23.049755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:16:23.126512Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2024-11-21T23:16:23.126581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:16:23.126704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2024-11-21T23:16:23.130823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:16:23.133307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 
101 |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:22.777445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:22.777529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:22.777566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:22.777617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:22.777665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:22.777691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:22.777742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:22.778107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:22.864390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:22.864442Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:22.874730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:22.878574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:22.878761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:22.891376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:22.892949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:22.893598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:22.893981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:22.900159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:22.901645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:22.901705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:22.901877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:22.901911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:22.901937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:22.902042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:22.914690Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:23.059153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:23.059430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.059681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:23.059923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:23.059989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.064820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:23.065002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:23.065218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.065294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:23.065347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:23.065391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:23.067900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.067963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:23.067997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:23.071269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.071325Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.071369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:23.071422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:23.075426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:23.077916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:23.078118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:23.079325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:23.079494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:23.079545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:23.079865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:23.079921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:23.080101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:23.080201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:23.082674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:23.082726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:23.082910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:23.082953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:23.083307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.083353Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:23.083490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:23.083533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:23.083576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:23.083615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:23.083666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all 
the parts is done, operation id: 1:0 2024-11-21T23:16:23.083699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:23.083782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:23.083827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:23.083860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:23.085662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:23.085766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:23.085804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:23.085853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:23.085898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:23.085985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... nside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2024-11-21T23:16:23.218149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:23.286782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2024-11-21T23:16:23.286997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T23:16:23.287062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:23.290014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2024-11-21T23:16:23.290232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2024-11-21T23:16:23.290492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 
2024-11-21T23:16:23.290563Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState 2024-11-21T23:16:23.290622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2024-11-21T23:16:23.290776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:23.292862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:16:23.293012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T23:16:23.293349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:23.293455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:23.293506Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2024-11-21T23:16:23.293705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2024-11-21T23:16:23.293902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:23.293978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:16:23.296465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:23.296515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:23.296675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:16:23.296834Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:23.296873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T23:16:23.296915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2024-11-21T23:16:23.297280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2024-11-21T23:16:23.297332Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 
2024-11-21T23:16:23.297439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:16:23.297476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:16:23.297541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T23:16:23.297582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:16:23.297619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:16:23.297654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:16:23.297723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:16:23.297762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T23:16:23.297797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T23:16:23.297827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T23:16:23.298475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:23.298571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:23.298613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:16:23.298663Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:16:23.298732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:23.299482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:23.299579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:16:23.299609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:16:23.299640Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T23:16:23.299679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:16:23.299755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T23:16:23.299993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:16:23.300038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:16:23.300104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:23.303277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:16:23.389398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:16:23.392625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:16:23.392914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:16:23.392954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:16:23.393371Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:16:23.393468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:16:23.393501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:320:2312] TestWaitNotification: OK eventTxId 102 2024-11-21T23:16:23.393959Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:23.394220Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 249us result status StatusPathDoesNotExist 2024-11-21T23:16:23.394432Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD] >> TStorageServiceTest::ShouldSaveState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2024-11-21T23:15:48.739235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875274536787863:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:48.739295Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000d48/r3tmp/tmp0UwdyD/pdisk_1.dat 2024-11-21T23:15:49.781409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:49.926968Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:49.945571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:49.945679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:49.959930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9911, node 1 2024-11-21T23:15:50.129368Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:50.129400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:50.129409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:50.129512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61102 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:50.831593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:50.901340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:53.464004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625380:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.464156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.464914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625406:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.464951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625409:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.465006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625410:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.465033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625411:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.465059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625413:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.465270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.465489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625405:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.465519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625408:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.470409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2024-11-21T23:15:53.474625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625429:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.474682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625431:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.474878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.476666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625519:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.476710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625513:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.476734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625514:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.476758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625516:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.476783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625517:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.476814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625518:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.487409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625607:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.487504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625606:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.487540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875296011625601:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.487654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:53.556422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875296011625421:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2024-11-21T23:15:53.556503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875296011625428:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2024-11-21T23:15:53.556535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875296011625515:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2024-11-21T23:15:53.556568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875296011625422:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2024-11-21T23:15:53.556599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875296011625423:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2024-11-21T23:15:53.556629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875296011625530:23 ... nsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2024-11-21T23:16:11.049615Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439875370981637722:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2024-11-21T23:16:11.248525Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8g5zkwbgk830a83pkbess7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=MmFmNTc0MjUtMWFmNDZhMDAtNjRkOGYxMzAtMTY3YTI1MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:11.286654Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:11.656451Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd8g604cerbsp4pmxgm5m8mf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=MmFmNTc0MjUtMWFmNDZhMDAtNjRkOGYxMzAtMTY3YTI1MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:11.713883Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2024-11-21T23:16:12.615454Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jd8g61159xp7cj3ts6njybth, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=MmFmNTc0MjUtMWFmNDZhMDAtNjRkOGYxMzAtMTY3YTI1MTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:12.750208Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root
: Error: Access denied 2024-11-21T23:16:12.838455Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1
: Error: Access denied 2024-11-21T23:16:12.876206Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys
: Error: Access denied 2024-11-21T23:16:12.923650Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys
: Error: Access denied 2024-11-21T23:16:13.040243Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/.sys/partition_stats
: Error: Access denied 2024-11-21T23:16:13.095622Z node 11 :TX_PROXY ERROR: Access denied for user0@builtin with access DescribeSchema to path Root/Tenant1/.sys/partition_stats
: Error: Access denied 2024-11-21T23:16:13.162633Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 13 2024-11-21T23:16:13.177971Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:13.197775Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 14 2024-11-21T23:16:13.203086Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:13.203251Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2024-11-21T23:16:13.203851Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:13.206830Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2024-11-21T23:16:13.219159Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:13.234762Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[15:7439875343191319972:2098], Type=268959746 2024-11-21T23:16:13.237293Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[12:7439875347329646199:2104], Type=268959746 2024-11-21T23:16:13.237325Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[12:7439875347329646199:2104], Type=268959746 2024-11-21T23:16:13.237350Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[12:7439875347329646199:2104], Type=268959746 2024-11-21T23:16:13.237373Z node 11 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[12:7439875347329646199:2104], Type=268959746 2024-11-21T23:16:16.521664Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7439875391091641618:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:16.521711Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000d48/r3tmp/tmp0Rlrw0/pdisk_1.dat 2024-11-21T23:16:16.692406Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:16.718725Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:16.718826Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:16.724701Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12047, node 16 2024-11-21T23:16:16.782890Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:16.782923Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:16.782938Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:16.783083Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24887 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:17.086755Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:17.095420Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:16:17.105024Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:20.867187Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439875408271511499:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:20.867250Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:20.867469Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439875408271511511:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:20.870247Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:16:20.882937Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439875408271511513:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:16:21.120542Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jd8g698112p9gn90p34pqspx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZGZhMGJjYTUtNzgzNzA1YzgtZDg5M2IwNzQtOTA2NTY2ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:21.348801Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jd8g69krdcppj7jemdwynt11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YjNmMmMyNjgtNjRjYzg5ZDUtYmRjZjRhOTgtYWI1NjE5ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:21.374729Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230981390, txId: 281474976710662] shutting down 2024-11-21T23:16:21.521913Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7439875391091641618:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:21.522002Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:21.631973Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8g69sgb6n3vzqfrr8v87ng, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MzM1MjlkODYtZGI1OTc4NC04MmVhNTktNmQ3NTJjOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:21.635095Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439875412566478981:2340], owner: [16:7439875412566478978:2338], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T23:16:21.635863Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439875412566478981:2340], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:21.636397Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439875412566478981:2340], row count: 2, finished: 1 2024-11-21T23:16:21.636435Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439875412566478981:2340], owner: [16:7439875412566478978:2338], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T23:16:21.641916Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230981630, txId: 281474976710664] shutting down >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> TSchemeShardViewTest::ReadOnlyMode >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::Negative |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TableWriter::Backup [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD] >> 
TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] >> TableWriter::Restore [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::RebootSchemeShard |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:25.294680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:25.294771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:25.294805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:25.294853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:25.294896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:25.294929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:25.294987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:25.295278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:25.362664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:25.362719Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:25.377563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:25.381781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:25.381953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:25.388515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:25.389066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState 
with owners number: 0 2024-11-21T23:16:25.389649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:25.389936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:25.393737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:25.394858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:25.394927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:25.395135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:25.395169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:25.395198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:25.395282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.401377Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:25.515695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:25.515956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.516160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:25.516380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:25.516445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.519678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:25.519827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:25.520001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.520068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:25.520112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:25.520151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:25.522777Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.522835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:25.522871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:25.524660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.524709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.524746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:25.524793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:25.528595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:25.532031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:25.532317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:25.533267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:25.533407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:25.533463Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:25.533758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:25.533811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:25.533967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:25.534048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:25.536218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:25.536263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:25.536441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2024-11-21T23:16:25.536486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:25.536864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.536902Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:25.537005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:25.537043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:25.537095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:25.537140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:25.537181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:25.537217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:25.537281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:25.537324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:25.537352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:25.538828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:25.538973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:25.538999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:25.539047Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:25.539084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:25.539170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
ead records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.874470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.874864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.874943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.875121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.875217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.875304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.875511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.875615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.875786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.875989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.876114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.876174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.876231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.891788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:25.891877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:25.892565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:25.892627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:25.892673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:25.894133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:376:2347] sender: [1:432:2058] recipient: [1:15:2062] 2024-11-21T23:16:25.934659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:25.935070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2024-11-21T23:16:25.935156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2024-11-21T23:16:25.935333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one 
child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:16:25.935427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2024-11-21T23:16:25.935523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:25.942934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2024-11-21T23:16:25.943157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2024-11-21T23:16:25.943363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.943422Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2024-11-21T23:16:25.943502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2024-11-21T23:16:25.943643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:25.947369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2024-11-21T23:16:25.947614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2024-11-21T23:16:25.948473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:25.948602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:25.948681Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2024-11-21T23:16:25.948844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2024-11-21T23:16:25.949048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:25.949143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2024-11-21T23:16:25.951809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:25.951876Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:25.952109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:16:25.952244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:25.952295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:425:2386], at schemeshard: 72057594046678944, txId: 103, path id: 1 2024-11-21T23:16:25.952337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:425:2386], at schemeshard: 72057594046678944, txId: 103, path id: 3 2024-11-21T23:16:25.952745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:16:25.952801Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2024-11-21T23:16:25.952907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2024-11-21T23:16:25.952948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:16:25.952995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2024-11-21T23:16:25.953046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2024-11-21T23:16:25.953081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:16:25.953118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:16:25.953230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:16:25.953298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2024-11-21T23:16:25.953334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T23:16:25.953363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2024-11-21T23:16:25.954286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:16:25.954409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:16:25.954456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:16:25.954523Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:16:25.954567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2024-11-21T23:16:25.955006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 
LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:16:25.955071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:16:25.955174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:16:25.955202Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2024-11-21T23:16:25.955243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:16:25.955350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2024-11-21T23:16:25.959379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:16:25.959509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 |85.7%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 |85.7%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TStorageServiceTest::ShouldSaveState [GOOD] >> TStorageServiceTest::ShouldUseGc |85.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> Worker::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] Test command err: 2024-11-21T23:14:37.689210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:37.689288Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:37.880093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:39.711523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2024-11-21T23:14:39.917029Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:39.917552Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:39.918405Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7040474682620541832 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:39.964424Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:39.964905Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:39.965780Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13446516995004748565 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:40.014510Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:40.015096Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:40.015262Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14641864762941907263 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:40.072924Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:40.073498Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:40.073700Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7252146604297373457 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:40.111632Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:40.112463Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:40.112638Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18442744483712146714 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:40.161049Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:40.161524Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:40.161684Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c0/r3tmp/tmpekoWC8/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/bui ... istered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.BackupReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.BackupReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.PrioritizedMvccSnapshotReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.UnprotectedMvccSnapshotReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.EnableLeaderLeases was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.MinLeaderLeaseDurationUs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control DataShardControls.TtlReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.TtlReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.EnableLockedWrites was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.MaxLockedWritesPerKey was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.ChangeRecordDebugPrint was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.IncrementalRestoreReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.IncrementalRestoreReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.CdcInitialScanReadAheadLo was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DataShardControls.CdcInitialScanReadAheadHi was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerRequestDataSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardReadSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardIncomingReadSetSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.DefaultTimeoutMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.EnableLeaderLeases was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.MinLeaderLeaseDurationUs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.VolatilePlanLeaseMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.PlanAheadTimeShiftMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control SchemeShardControls.ForceShardSplitDataSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control SchemeShardControls.DisableForceShardSplit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.ProfileSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.GuardedSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.MemoryLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheTargetSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheReleaseRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableLocalSyncLogDataCutting was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DefaultHugeGarbagePerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control VDiskControls.HugeDefragFreeSpaceBorderPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TabletControls.MaxCommitRedoMB was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThreshold was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplier was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.LongRequestThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.LongRequestReportingDelayMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisks was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThresholdHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplierHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThresholdSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplierSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.MaxCommonLogChunksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.MaxCommonLogChunksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control BlobStorageControllerControls.EnableSelfHealWithDegraded was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
2024-11-21T23:16:24.198746Z node 122 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:24.198842Z node 122 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:24.246229Z node 122 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:16:25.432099Z node 123 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:25.432179Z node 123 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:25.499822Z node 123 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> SystemView::TopPartitionsFollowers [GOOD] >> SystemView::TabletsShards >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK >> TSchemeShardServerLess::StorageBilling >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> TSchemeShardServerLess::BaseCase >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> TSchemeShardServerLess::Fake [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> 
DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2024-11-21T23:14:38.765147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:38.765210Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:38.960008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:43.502113Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:43.502220Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:43.559628Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:47.317742Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:47.317815Z node 19 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:47.386321Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:48.427693Z node 19 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/dc-1/users/tenant-1" resources { storage_units { unit_kind: "hdd" count: 1 } } database_quotas { } } 2024-11-21T23:14:48.427924Z node 19 :CMS_TENANTS DEBUG: Add tenant /dc-1/users/tenant-1 (txid = 481048) 2024-11-21T23:14:48.453550Z node 19 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2024-11-21T23:14:48.453943Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) Bootstrap 2024-11-21T23:14:48.454217Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd" } } } 2024-11-21T23:14:48.455169Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 2 2024-11-21T23:14:48.455402Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 1 Name: "/dc-1/users/tenant-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2024-11-21T23:14:48.458460Z node 19 :CMS_TENANTS DEBUG: Add subscription to /dc-1/users/tenant-1 for [19:528:2137] 2024-11-21T23:14:48.473086Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) got config response: Status { Success: true AssignedStoragePoolId: 1 } Success: true ConfigTxSeqNo: 3 2024-11-21T23:14:48.473184Z node 19 :CMS_TENANTS DEBUG: TPoolManip(/dc-1/users/tenant-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2024-11-21T23:14:48.473376Z node 19 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /dc-1/users/tenant-1:hdd of /dc-1/users/tenant-1 state=ALLOCATED 2024-11-21T23:14:48.489770Z node 19 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /dc-1/users/tenant-1:hdd 2024-11-21T23:14:48.489931Z node 19 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /dc-1/users/tenant-1 to CREATING_SUBDOMAIN 2024-11-21T23:14:48.507215Z node 19 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for 
/dc-1/users/tenant-1 2024-11-21T23:14:48.522979Z node 19 :CMS_TENANTS DEBUG: TSubdomainManip(/dc-1/users/tenant-1)::Bootstrap 2024-11-21T23:14:48.523058Z node 19 :CMS_TENANTS DEBUG: TSubDomainManip(/dc-1/users/tenant-1) create subdomain 2024-11-21T23:14:48.527447Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2024-11-21T23:14:48.538146Z node 19 :CMS_TENANTS DEBUG: TSubdomainManip(/dc-1/users/tenant-1) got propose result: Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046578944 PathId: 3 2024-11-21T23:14:48.564683Z node 19 :CMS_TENANTS DEBUG: TSubdomainManip(/dc-1/users/tenant-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715657 2024-11-21T23:14:48.706608Z node 24 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:48.707354Z node 24 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:48.708263Z node 24 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1055404375269468363 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:48.747814Z node 20 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:48.748254Z node 20 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2024-11-21T23:14:48.748437Z node 20 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12067696266536867679 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:48.752809Z node 20 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# 
PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:14:48.814004Z node 26 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:48.814763Z node 26 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:48.814982Z node 26 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/0023c4/r3tmp/tmpKC2PVm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 445408688791412115 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch ... 1/1 2024-11-21T23:16:24.486977Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2024-11-21T23:16:24.487102Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2024-11-21T23:16:24.487185Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715661, ready parts: 1/1, is published: false 2024-11-21T23:16:24.487388Z node 154 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [154:1357:2609], msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72057594046578944 2024-11-21T23:16:24.487464Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2024-11-21T23:16:24.487558Z node 154 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2024-11-21T23:16:24.487650Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715661:0 2024-11-21T23:16:24.487763Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 12 2024-11-21T23:16:24.487859Z node 154 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 1, subscribers: 1 2024-11-21T23:16:24.487932Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046578944, LocalPathId: 3], 7 2024-11-21T23:16:24.496174Z node 154 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { 
Key: "name1" Value: "value1" } UserAttributesVersion: 3 2024-11-21T23:16:24.496296Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72075186233409546 2024-11-21T23:16:24.496591Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2024-11-21T23:16:24.496942Z node 154 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2024-11-21T23:16:24.497034Z node 154 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 tablet 72057594046578944 removed=1 2024-11-21T23:16:24.497069Z node 154 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 acknowledged 2024-11-21T23:16:24.497102Z node 154 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 acknowledged 2024-11-21T23:16:24.497272Z node 154 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2024-11-21T23:16:24.497310Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 281474976715661, path id: [OwnerId: 72057594046578944, LocalPathId: 3] 2024-11-21T23:16:24.497496Z node 154 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2024-11-21T23:16:24.497535Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [154:664:2238], at schemeshard: 72057594046578944, txId: 281474976715661, path id: 3 2024-11-21T23:16:24.499137Z node 154 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2024-11-21T23:16:24.499234Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2024-11-21T23:16:24.499288Z node 154 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 281474976715661 2024-11-21T23:16:24.499389Z node 154 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 281474976715661, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], version: 7 2024-11-21T23:16:24.499514Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2024-11-21T23:16:24.499686Z node 154 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 281474976715661, subscribers: 1 2024-11-21T23:16:24.499783Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [154:1863:2379] 2024-11-21T23:16:24.504504Z node 154 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046578944, msg: DomainSchemeShard: 72057594046578944 DomainPathId: 3 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 3 TenantHive: 18446744073709551615 TenantSysViewProcessor: 72075186233409553 TenantRootACL: "" 
TenantStatisticsAggregator: 72075186233409554 TenantGraphShard: 18446744073709551615 2024-11-21T23:16:24.504596Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2024-11-21T23:16:24.504676Z node 154 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046578944, LocalPathId: 3], Generation: 2, ActorId:[154:1357:2609], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 3, TenantHive: 18446744073709551615, TenantSysViewProcessor: 72075186233409553, TenantStatisticsAggregator: 72075186233409554, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 3, tenantHive: 18446744073709551615, tenantSysViewProcessor: 72075186233409553, at schemeshard: 72057594046578944 2024-11-21T23:16:24.504769Z node 154 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2024-11-21T23:16:24.504806Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2024-11-21T23:16:24.504965Z node 154 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2024-11-21T23:16:24.505003Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [154:1578:2763], at schemeshard: 72075186233409546, txId: 0, path id: 1 2024-11-21T23:16:24.508403Z node 154 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186233409546, cookie: 0 2024-11-21T23:16:24.509709Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 281474976715661 2024-11-21T23:16:24.509795Z node 154 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 2024-11-21T23:16:27.302786Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:27.302898Z node 163 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:27.372371Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks >> TSchemeShardServerLess::BaseCase [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:30.894142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:30.894251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:30.894288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:30.894354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:30.894429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:30.894459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:30.894521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:30.895009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:30.975982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:30.976040Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:30.990804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:30.994336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:30.994551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:31.000660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:31.001199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:31.001884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:31.002243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:31.012963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:31.014474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:31.014548Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:31.014809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:31.014866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:31.014912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:31.015015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.022667Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:31.180352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2024-11-21T23:16:31.180620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.180829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:31.181060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:31.181131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.183614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:31.183754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:31.183936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.183993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:31.184049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:31.184086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:31.187605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.187674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:31.187715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:31.191525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.191581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.191629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:31.191673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:31.194993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:31.199113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:31.199336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:31.200388Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:31.200543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:31.200602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:31.200861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:31.200910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:31.201087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:31.201184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:31.204877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:31.204925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:31.205100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:31.205141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:31.205533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.205586Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:31.205688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:31.205719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:31.205758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:31.205848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:31.205894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:31.205928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:31.206010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:31.206045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:31.206073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:31.207800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:31.207921Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:31.207956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:31.208003Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:31.208043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:31.208165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409547, partId: 0 2024-11-21T23:16:31.586578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409547 2024-11-21T23:16:31.586628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2024-11-21T23:16:31.586681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#104:0 Got OK TEvConfigureStatus from tablet# 72075186234409547 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2024-11-21T23:16:31.590908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.594967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.595209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409548, partId: 0 2024-11-21T23:16:31.595312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409548 2024-11-21T23:16:31.595355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2024-11-21T23:16:31.595399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#104:0 Got OK TEvConfigureStatus from tablet# 72075186234409548 shardIdx# 72057594046678944:7 at schemeshard# 72057594046678944 2024-11-21T23:16:31.595436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2024-11-21T23:16:31.597985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.598129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.598174Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.598211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet 72057594046678944 2024-11-21T23:16:31.598255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2024-11-21T23:16:31.598371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:31.599810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2024-11-21T23:16:31.599973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2024-11-21T23:16:31.600274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:31.600373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:31.600435Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet 72057594046678944 2024-11-21T23:16:31.600687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2024-11-21T23:16:31.600734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet 72057594046678944 2024-11-21T23:16:31.600832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:16:31.600920Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:599:2528], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2024-11-21T23:16:31.602525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:31.602571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2024-11-21T23:16:31.602710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:31.602742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2024-11-21T23:16:31.603071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.603123Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 
2024-11-21T23:16:31.603155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2024-11-21T23:16:31.603646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:16:31.603720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2024-11-21T23:16:31.603759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2024-11-21T23:16:31.603804Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2024-11-21T23:16:31.603850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2024-11-21T23:16:31.603921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2024-11-21T23:16:31.606137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.606190Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2024-11-21T23:16:31.606290Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2024-11-21T23:16:31.606323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:16:31.606366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2024-11-21T23:16:31.606427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2024-11-21T23:16:31.606483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2024-11-21T23:16:31.606520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2024-11-21T23:16:31.606663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:16:31.607294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2024-11-21T23:16:31.608764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2024-11-21T23:16:31.608810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2024-11-21T23:16:31.609259Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2024-11-21T23:16:31.609339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2024-11-21T23:16:31.609375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:756:2640] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2024-11-21T23:16:31.612109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: 
"ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:31.612280Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2024-11-21T23:16:31.612321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2024-11-21T23:16:31.612457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2024-11-21T23:16:31.612515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2024-11-21T23:16:31.614770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:31.614933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:30.837800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:30.837891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:30.837933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:30.837999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:30.838043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:30.838075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:30.838130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2024-11-21T23:16:30.838527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:30.918936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:30.918996Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:30.942940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:30.947915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:30.948122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:30.957555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:30.958238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:30.958974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:30.959312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:30.980750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:30.982200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:30.982288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:30.982551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:30.982607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:30.982652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:30.982751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.990505Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:31.153287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:31.153522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.153777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:31.153990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:31.154055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.158916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2024-11-21T23:16:31.159098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:31.159297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.159357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:31.159405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:31.159472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:31.161903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.161950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:31.161976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:31.163728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.163777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.163814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:31.163860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:31.184225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:31.187179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:31.187416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:31.188559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:31.188702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:31.188761Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:31.189026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:31.189082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:31.189289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:31.189445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:31.195087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:31.195141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:31.195371Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:31.195429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:31.195906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.195962Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:31.196073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:31.196110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:31.196153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:31.196191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:31.196232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:31.196263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:31.196312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:31.196340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:31.196364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:31.197601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:31.197679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:31.197706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:31.197742Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:31.197782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:31.197853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
SCHEMESHARD DEBUG: Free shard 72057594046678944:7 hive 72075186233409546 at ss 72057594046678944 2024-11-21T23:16:31.862217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72075186233409546 at ss 72057594046678944 2024-11-21T23:16:31.862379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2024-11-21T23:16:31.862435Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2024-11-21T23:16:31.862549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2024-11-21T23:16:31.862587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T23:16:31.862652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2024-11-21T23:16:31.862713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2024-11-21T23:16:31.862749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2024-11-21T23:16:31.862781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2024-11-21T23:16:31.862948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2024-11-21T23:16:31.864973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2024-11-21T23:16:31.865266Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 2024-11-21T23:16:31.865553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2024-11-21T23:16:31.865924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2024-11-21T23:16:31.867072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2024-11-21T23:16:31.869856Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2024-11-21T23:16:31.870645Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547 2024-11-21T23:16:31.871013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2024-11-21T23:16:31.871244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409548 2024-11-21T23:16:31.872954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186234409547 2024-11-21T23:16:31.873129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2024-11-21T23:16:31.873315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:16:31.874654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:16:31.874699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:16:31.874812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2024-11-21T23:16:31.875318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:16:31.875384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2024-11-21T23:16:31.875482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:31.878564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2024-11-21T23:16:31.878622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2024-11-21T23:16:31.879020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2024-11-21T23:16:31.879061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2024-11-21T23:16:31.879148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2024-11-21T23:16:31.879186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2024-11-21T23:16:31.880191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:16:31.880258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2024-11-21T23:16:31.880523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2024-11-21T23:16:31.880581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2024-11-21T23:16:31.881090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2024-11-21T23:16:31.881197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2024-11-21T23:16:31.881253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:920:2787] TestWaitNotification: OK eventTxId 106 2024-11-21T23:16:31.881739Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:31.881894Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 172us result status 
StatusPathDoesNotExist 2024-11-21T23:16:31.882016Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:16:31.882381Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:31.882542Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 141us result status StatusPathDoesNotExist 2024-11-21T23:16:31.882631Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:16:31.882980Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:31.883108Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 129us result status StatusSuccess 2024-11-21T23:16:31.883360Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409546 is deleted wait until 72075186234409547 is deleted wait until 72075186234409548 is deleted wait until 72075186234409549 is deleted 2024-11-21T23:16:31.883749Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409546 2024-11-21T23:16:31.883802Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409547 2024-11-21T23:16:31.883852Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409548 2024-11-21T23:16:31.883880Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 Deleted tabletId 72075186234409546 Deleted tabletId 72075186234409547 Deleted tabletId 72075186234409548 Deleted tabletId 72075186234409549 >> Viewer::QueryExecuteScript [GOOD] >> Viewer::Plan2SvgOK >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] Test command err: 2024-11-21T23:16:13.752874Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7439875376144856862:2048] with connection to localhost:27138:local 2024-11-21T23:16:13.754691Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:14.395379Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:14.395407Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:14.399351Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:15.783481Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:15.783522Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:15.784686Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:16.220142Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2024-11-21T23:16:16.220175Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:16.220590Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T23:16:16.615423Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2024-11-21T23:16:16.615466Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T23:16:17.786544Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7439875396628499763:2048] with connection to localhost:27138:local 2024-11-21T23:16:17.786637Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:18.202588Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:18.202627Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:18.202964Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2024-11-21T23:16:18.724585Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2024-11-21T23:16:18.724625Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2024-11-21T23:16:20.452046Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7439875405582700448:2048] with connection to localhost:27138:local 2024-11-21T23:16:20.452126Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:20.832166Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:20.832202Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:20.834733Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2024-11-21T23:16:21.232324Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080 2024-11-21T23:16:21.232365Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2024-11-21T23:16:22.790753Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7439875413441551081:2048] with connection to localhost:27138:local 2024-11-21T23:16:22.790843Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:23.277306Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:23.277335Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:23.279779Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:24.766289Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:24.766325Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:24.769713Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2024-11-21T23:16:25.191666Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080 2024-11-21T23:16:25.191698Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2024-11-21T23:16:26.938659Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7439875430371679984:2048] with connection to localhost:27138:local 2024-11-21T23:16:26.938749Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:27.319055Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:27.319082Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:27.319364Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:28.927438Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:28.927494Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:28.927835Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:16:29.338104Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2024-11-21T23:16:29.338153Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:16:29.338841Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:29.661647Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2024-11-21T23:16:29.661678Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:29.662012Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2024-11-21T23:16:29.786723Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2024-11-21T23:16:29.786761Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1 >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> TBlobStorageWardenTest::TestDeleteStoragePool >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 >> SystemView::TabletsShards [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> Viewer::Cluster10000Tablets [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup >> SystemView::TopPartitionsRanges [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD] >> DataShardReadIterator::ShouldReadFromFollower >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> Worker::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TabletsShards [GOOD] Test command err: 2024-11-21T23:15:45.704161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875260461348258:2256];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:45.704254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000dac/r3tmp/tmpu4EJMg/pdisk_1.dat 2024-11-21T23:15:46.834311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:47.339564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:47.339671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:47.348498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:47.408426Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11132, node 1 2024-11-21T23:15:47.890102Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:47.890125Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2024-11-21T23:15:47.890134Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:47.890257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13798 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:48.777114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:48.858293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:48.915916Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875275001668869:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:48.915952Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:48.945436Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439875271347040062:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:48.945492Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:49.024281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:49.024374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:49.048297Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T23:15:49.049551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2024-11-21T23:15:49.147281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:49.147325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:49.173809Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T23:15:49.185621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:49.374288Z node 5 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2024-11-21T23:15:49.374339Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2024-11-21T23:15:49.392456Z node 5 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T23:15:49.395521Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2024-11-21T23:15:49.395593Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2024-11-21T23:15:49.396240Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T23:15:49.396332Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2024-11-21T23:15:49.396404Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2024-11-21T23:15:49.396639Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T23:15:49.396698Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2024-11-21T23:15:49.396802Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2024-11-21T23:15:49.396860Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2024-11-21T23:15:49.396908Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2024-11-21T23:15:49.396953Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2024-11-21T23:15:49.397018Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0 2024-11-21T23:15:49.397071Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2024-11-21T23:15:49.397117Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2024-11-21T23:15:49.397189Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2024-11-21T23:15:49.397257Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2024-11-21T23:15:49.397326Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2024-11-21T23:15:49.397393Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2024-11-21T23:15:49.397439Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2024-11-21T23:15:49.397522Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2024-11-21T23:15:49.397642Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2024-11-21T23:15:49.000000Z 2024-11-21T23:15:49.398677Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:49.408966Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2024-11-21T23:15:49.412477Z node 5 
:SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Tenant1 2024-11-21T23:15:49.415803Z node 5 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2024-11-21T23:15:49.431797Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2024-11-21T23:15:49.461694Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [5:7439875275001668887:2058], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T23:15:49.477589Z node 5 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [5:7439875275001668887:2058], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2024-11-21T23:15:49.477639Z node 5 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [5:7439875275001668887:2058], database# /Root/Tenant1, no sysview processor 2024-11-21T23:15:49.530981Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2024-11-21T23:15:49.531038Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2024-11-21T23:15:49.000000Z, query count# 0 2024-11-21T23:15:49.531056Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T23:15:49.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:49.541237Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T23:15:49.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:49.541306Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T23:15:49.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:49.541338Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T23:15:49.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:49.542587Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:49.542631Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:49.547284Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:49.552922Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:49.653772Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2024-11-21T23:15:49.759470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:49.845834Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875275214018385:2189];send_to=[0:7307199536658146131:7762515]; waiting... 
2024-11-21T23:15:50.006677Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxCollect::Execute 2024-11-21T23:15:50.006732Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistPartitionTopResults: table id# 17, partition interval end# 2024-11-21T23:15:50.000000Z, partition count# 0 2024-11-21T23:15:50.006759Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistPartitionTopResults: table id# 18, partition interval ... 9167085771:2137], Recipient [10:7439875399167085794:2272]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T23:16:27.070851Z node 10 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2024-11-21T23:16:27.080868Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7439875395346061561:2055], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T23:16:27.082512Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7439875399167085558:2123], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T23:16:27.084296Z node 9 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [9:7439875395346062100:2285], Recipient [9:7439875395346062114:2296]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T23:16:27.084398Z node 9 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2024-11-21T23:16:27.084804Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [10:7439875403462053174:2193], Recipient [10:7439875399167085748:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:16:27.084089Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7439875395863615965:2541], Recipient [6:7439875374388778983:2216]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:16:27.084125Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:16:27.084135Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T23:16:27.084855Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:16:27.084892Z node 10 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [6:7439875400158583629:2807], serverId# [10:7439875403462053174:2193], sessionId# [0:0:0] 2024-11-21T23:16:27.087311Z node 9 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 268829696, Sender [9:7439875395346062099:2284], Recipient [9:7439875395346062113:2295]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T23:16:27.087388Z node 9 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2024-11-21T23:16:27.089310Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268829696, Sender [9:7439875395346061808:2111], Recipient [9:7439875395346061865:2274]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T23:16:27.090115Z node 9 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:27.092115Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T23:16:27.093205Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:27.098246Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7439875395346061561:2055], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T23:16:27.098754Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:7439875395863616101:2645], Recipient [6:7439875374388778983:2216]: 
NKikimr::TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:16:27.098789Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:16:27.098808Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2024-11-21T23:16:27.090868Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [10:7439875399167085733:2111], Recipient [10:7439875399167085748:2270]: NKikimr::TEvTablet::TEvTabletDead 2024-11-21T23:16:27.091217Z node 10 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 2024-11-21T23:16:27.091267Z node 10 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 2024-11-21T23:16:27.104232Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439875399167085558:2123], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T23:16:27.111184Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875399167085631:2100], Type=268959746 2024-11-21T23:16:27.111236Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875399167085631:2100], Type=268959746 2024-11-21T23:16:27.111259Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875399167085631:2100], Type=268959746 2024-11-21T23:16:27.111283Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875399167085631:2100], Type=268959746 2024-11-21T23:16:27.111307Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875399167085631:2100], Type=268959746 2024-11-21T23:16:27.111334Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875399167085631:2100], Type=268959746 2024-11-21T23:16:27.111357Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875399167085631:2100], Type=268959746 2024-11-21T23:16:27.111378Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[10:7439875399167085631:2100], Type=268959746 2024-11-21T23:16:27.180475Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [6:7439875374388778983:2216]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:16:27.180514Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2024-11-21T23:16:27.180562Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [6:7439875374388778983:2216], Recipient [6:7439875374388778983:2216]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:16:27.180591Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2024-11-21T23:16:29.571337Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7439875447279539474:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:29.571404Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000dac/r3tmp/tmpeskJBL/pdisk_1.dat 2024-11-21T23:16:29.769749Z node 11 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:29.794480Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:29.794602Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:29.796934Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1817, node 11 2024-11-21T23:16:29.879029Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:29.879054Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:29.879061Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:29.879170Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:30.323910Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:30.331067Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:16:30.345165Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:33.970153Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439875464459409427:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:33.970331Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:33.970957Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7439875464459409462:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:33.976303Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:16:33.987362Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7439875464459409464:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:16:34.206488Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jd8g6jmm42htrz4h7h6psrvv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=N2RmOTZkNmEtMjVhNGE0ZTQtODYxYjc4ODQtNmFkZWE2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:34.208661Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7439875468754376845:2322], owner: [11:7439875468754376841:2320], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:34.209253Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7439875468754376845:2322], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:16:34.210127Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7439875468754376845:2322], row count: 3, finished: 1 2024-11-21T23:16:34.210180Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7439875468754376845:2322], owner: [11:7439875468754376841:2320], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2024-11-21T23:16:34.212769Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230994205, txId: 281474976710661] shutting down >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteEmptyColumns >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonStorageListingV1 >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |85.8%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> Viewer::Plan2SvgOK [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus >> TCdcStreamWithInitialScanTests::DropStream >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite >> Viewer::Plan2SvgBad |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2024-11-21T23:16:35.118868Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:35.119885Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:35.119966Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:35.121407Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:35.122539Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:35.122625Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fda/r3tmp/tmpZFeO6I/pdisk_1.dat 2024-11-21T23:16:35.823475Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:471:2455] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1297:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:35.823650Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1297:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.823697Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1297:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 
2024-11-21T23:16:35.823728Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1297:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.823754Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1297:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.823781Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1297:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.823806Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1297:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.823848Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:1297:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:35.823922Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1297:1] Marker# BPG33 2024-11-21T23:16:35.823968Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1297:1] Marker# BPG32 2024-11-21T23:16:35.824014Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1297:2] Marker# BPG33 2024-11-21T23:16:35.824041Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1297:2] Marker# BPG32 2024-11-21T23:16:35.824075Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1297:3] Marker# BPG33 2024-11-21T23:16:35.824103Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1297:3] Marker# BPG32 2024-11-21T23:16:35.824294Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1297:3] FDS# 1297 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:35.824370Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1297:2] FDS# 1297 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:35.824419Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1297:1] FDS# 1297 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:35.826970Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1297:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90212 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:35.827237Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1297:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90212 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# 
[2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:16:35.827333Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1297:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90212 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:35.827415Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1297:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:35.827502Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1297:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:35.904698Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] bootstrap ActorId# [1:515:2492] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:222:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:35.904860Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.904923Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.904955Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.904979Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.905000Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.905023Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:35.905059Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] restore Id# [72057594037932033:2:8:0:0:222:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:35.905128Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG33 2024-11-21T23:16:35.905174Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG32 2024-11-21T23:16:35.905214Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG33 2024-11-21T23:16:35.905244Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG32 2024-11-21T23:16:35.905275Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG33 2024-11-21T23:16:35.905301Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 2 to# 2 blob Id# 
[72057594037932033:2:8:0:0:222:3] Marker# BPG32 2024-11-21T23:16:35.905469Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:3] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:35.905576Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:2] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:35.905628Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:1] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:35.913634Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:16:35.913967Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:35.914059Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:35.914136Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:35.914194Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:35.915180Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:16:35.915224Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T23:16:35.915328Z nod ... 
16:37.940818Z node 2 :BS_PROXY_PUT NOTICE: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 Sending TEvPut 2024-11-21T23:16:37.941174Z node 2 :BS_PROXY_PUT INFO: [abc2fc901918ac71] bootstrap ActorId# [2:593:2505] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:37.941260Z node 2 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:37.941309Z node 2 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:37.941349Z node 2 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2024-11-21T23:16:37.941386Z node 2 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2024-11-21T23:16:37.941486Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:582:2496] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:37.950073Z node 2 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T23:16:37.950240Z node 2 :BS_PROXY_PUT DEBUG: [abc2fc901918ac71] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2024-11-21T23:16:37.950306Z node 2 :BS_PROXY_PUT INFO: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:37.950972Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:16:37.951023Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T23:16:37.951176Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2024-11-21T23:16:37.951874Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/dl5p/001fda/r3tmp/tmpqd2zP6//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2024-11-21T23:16:37.952850Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2024-11-21T23:16:37.952901Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:16:37.955059Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:597:2104] targetNodeId# 2 
Marker# DSP01 2024-11-21T23:16:37.955205Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:598:2105] targetNodeId# 2 Marker# DSP01 2024-11-21T23:16:37.955370Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:599:2106] targetNodeId# 2 Marker# DSP01 2024-11-21T23:16:37.955509Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:600:2107] targetNodeId# 2 Marker# DSP01 2024-11-21T23:16:37.955625Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:601:2108] targetNodeId# 2 Marker# DSP01 2024-11-21T23:16:37.955748Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:602:2109] targetNodeId# 2 Marker# DSP01 2024-11-21T23:16:37.955862Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:595:2103] Create Queue# [3:603:2110] targetNodeId# 2 Marker# DSP01 2024-11-21T23:16:37.955895Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:16:37.957407Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:37.957728Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:37.957796Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:37.957937Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:37.957998Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:37.958307Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:37.958447Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 
WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:37.958483Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T23:16:37.958518Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T23:16:37.958685Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [3:606:2111] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T23:16:37.958770Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2024-11-21T23:16:37.958976Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:597:2104] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 7579673777855817645 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T23:16:37.960632Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2024-11-21T23:16:37.960702Z node 3 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2024-11-21T23:16:37.961060Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T23:16:37.961282Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2024-11-21T23:16:37.961665Z node 2 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [2:607:2506] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:37.961820Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:37.961876Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:37.961942Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2024-11-21T23:16:37.961991Z node 2 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2024-11-21T23:16:37.962141Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:582:2496] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:37.962425Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2024-11-21T23:16:37.962738Z node 2 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T23:16:37.962829Z node 2 :BS_PROXY_PUT ERROR: [b6b2c6548553d7a5] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2024-11-21T23:16:37.962887Z node 2 :BS_PROXY_PUT NOTICE: [b6b2c6548553d7a5] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:37.963267Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:597:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2024-11-21T23:16:28.603745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875446051502949:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:28.603813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023ea/r3tmp/tmpdsOu7L/pdisk_1.dat 2024-11-21T23:16:28.963553Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2024-11-21T23:16:29.012305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:29.012434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:29.014421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17823 TServer::EnableGrpc on GrpcPort 16745, node 1 2024-11-21T23:16:29.233824Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:29.233845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:29.233853Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:29.233951Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:29.607905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:29.771526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732230989888 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... 
(TRUNCATED) 2024-11-21T23:16:29.898174Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handshake: worker# [1:7439875450346471007:2397] 2024-11-21T23:16:29.898242Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handshake: worker# [1:7439875450346471007:2397] 2024-11-21T23:16:29.898593Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:16:29.898849Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2024-11-21T23:16:29.898918Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2024-11-21T23:16:29.898930Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7439875450346471007:2397] Handshake with writer: sender# [1:7439875450346471009:2397] 2024-11-21T23:16:29.912022Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Create read session: session# [1:7439875450346471012:2283] 2024-11-21T23:16:29.912110Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2024-11-21T23:16:29.912156Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7439875450346471007:2397] Handshake with reader: sender# [1:7439875450346471008:2397] 2024-11-21T23:16:29.912189Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T23:16:29.954467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2024-11-21T23:16:31.668057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875458936405768:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:31.668213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:31.668521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875458936405786:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:31.668529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875458936405787:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:31.673504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480 2024-11-21T23:16:31.683737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875458936405790:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:16:31.683793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875458936405791:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:16:32.675751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 2024-11-21T23:16:33.198612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:16:33.611706Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875446051502949:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:33.611781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:33.753743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T23:16:34.217112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T23:16:34.643154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:16:35.708060Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 0 Data: 36b Codec: RAW }] } } 2024-11-21T23:16:35.708175Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 36b CreateTime: 2024-11-21T23:16:35Z }] } 2024-11-21T23:16:35.708255Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 0 Data: 36b CreateTime: 2024-11-21T23:16:35Z }] } 2024-11-21T23:16:35.708381Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2024-11-21T23:16:35.709374Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439875476116275749:2397] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2024-11-21T23:16:35.709425Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2024-11-21T23:16:35.709539Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439875476116275749:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T23:16:35.719070Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439875476116275749:2397] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T23:16:35.719176Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2024-11-21T23:16:35.719272Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2024-11-21T23:16:35.719373Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T23:16:35.719453Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T23:16:35.976203Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 1 Data: 36b Codec: RAW }] } } 2024-11-21T23:16:35.976291Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 1 Data: 36b CreateTime: 2024-11-21T23:16:35Z }] } 2024-11-21T23:16:35.976338Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 1 Data: 36b CreateTime: 2024-11-21T23:16:35Z }] } 2024-11-21T23:16:35.976437Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2024-11-21T23:16:35.976551Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439875476116275749:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T23:16:35.978653Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439875476116275749:2397] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T23:16:35.978698Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2024-11-21T23:16:35.978723Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2024-11-21T23:16:35.978750Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T23:16:35.978773Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T23:16:36.140885Z node 1 :REPLICATION_SERVICE DEBUG: 
[RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Offset: 2 Data: 36b Codec: RAW }] } } 2024-11-21T23:16:36.140960Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 2 Data: 36b CreateTime: 2024-11-21T23:16:36Z }] } 2024-11-21T23:16:36.141026Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Offset: 2 Data: 36b CreateTime: 2024-11-21T23:16:36Z }] } 2024-11-21T23:16:36.141153Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2024-11-21T23:16:36.141255Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439875476116275749:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2024-11-21T23:16:36.145542Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7439875476116275749:2397] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2024-11-21T23:16:36.145618Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2024-11-21T23:16:36.145674Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7439875450346471009:2397] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2024-11-21T23:16:36.145718Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T23:16:36.145753Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll 2024-11-21T23:16:36.264033Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2024-11-21T23:16:36.264056Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7439875450346471008:2397] Leave 2024-11-21T23:16:36.264104Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7439875450346471007:2397] Reader has gone: sender# [1:7439875450346471008:2397] 2024-11-21T23:16:36.264149Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875480411243233:2397] Handshake: worker# [1:7439875450346471007:2397] 2024-11-21T23:16:36.265019Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875480411243233:2397] Create read session: session# [1:7439875480411243234:2283] 2024-11-21T23:16:36.265062Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7439875450346471007:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2024-11-21T23:16:36.265074Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7439875450346471007:2397] Handshake with reader: sender# [1:7439875480411243233:2397] 2024-11-21T23:16:36.265100Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7439875480411243233:2397] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll |85.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldRollbackLocksWhenWrite >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |85.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |85.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |85.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |85.8%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::TopPartitionsRanges [GOOD] Test command err: 2024-11-21T23:15:43.885964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875251581988350:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:43.886055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000ddf/r3tmp/tmp4kI9wA/pdisk_1.dat 2024-11-21T23:15:44.774952Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:44.776212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:44.776286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:44.784302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7925, node 1 2024-11-21T23:15:45.154771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:45.154791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:45.154802Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:45.154889Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:45.858255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:45.891776Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:15:48.866584Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875251581988350:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:48.866699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:49.509333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875277351792598:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.509623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.510081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875277351792633:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:49.514161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:15:49.538832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875277351792635:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:15:50.220075Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jd8g576115r5vygdc1epc5n4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQxMWI5MTgtNWU2NzAyNzAtZWQzZmRkYTctMTU1MTRlYmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:50.348357Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7439875281646760023:2315], owner: [1:7439875281646760019:2313], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T23:15:50.350915Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7439875281646760023:2315], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:15:50.381402Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7439875281646760023:2315], row count: 2, finished: 1 2024-11-21T23:15:50.381472Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7439875281646760023:2315], owner: [1:7439875281646760019:2313], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2024-11-21T23:15:50.395222Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230950218, txId: 281474976710660] shutting down 2024-11-21T23:15:51.891889Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875284317528850:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000ddf/r3tmp/tmpsha7rW/pdisk_1.dat 2024-11-21T23:15:51.929484Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:52.067235Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:52.098443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:52.098577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:52.101581Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64221, node 2 2024-11-21T23:15:52.230933Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:52.230958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:52.230965Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:52.231064Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:52.495140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.532983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:52.551687Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439875289444923750:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:52.572979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:52.574135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:52.579876Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2024-11-21T23:15:52.580240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:52.580285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T23:15:52.582997Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:52.587093Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T23:15:52.590163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:52.688491Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:52.716842Z node 5 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2024-11-21T23:15:52.716931Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2024-11-21T23:15:52.736609Z node 6 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [6:7439875289444923596:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T23:15:52.775779Z node 6 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [6:7439875289444923596:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Tenant1 2024-11-21T23:15:52.777796Z node 6 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [6:7439875289444923596:2055], database# /Root/Tenant1, no sysview processor 2024-11-21T23:15:52.803467Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [5:7439875288191679190:2055], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T23:15:52.803599Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2024-11-21T23:15:52.803656Z node 5 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2024-11-21T23:15:52.804147Z node 5 :SYSTEM_VIEWS DEBUG: [7207518622403 ... S DEBUG: [72075186224037899] PersistQueryTopResults: table id# 9, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:16:32.554023Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 11, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:16:32.554051Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 13, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:16:32.554077Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistQueryTopResults: table id# 15, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:16:32.555422Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxTopPartitions::Complete 2024-11-21T23:16:32.564311Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxAggregate::Complete 2024-11-21T23:16:32.615761Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] TEvApplyCounters: services count# 1 2024-11-21T23:16:32.803412Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jd8g6mnt6gdwzxz5gyvsj08m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2M5ZjdlOTctMTFiZGI2MGItZThlZjJiNC1iODU2YzRjYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:16:32.806984Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732230991000000 Rank: 0 } InclusiveFrom: false To { IntervalEndUs: 1732230991000000 Rank: 3 } InclusiveTo: false Type: TOP_PARTITIONS_ONE_MINUTE , rows# 2, bytes# 153, next# 2024-11-21T23:16:32.805848Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439875461176759331:2444], owner: [7:7439875461176759328:2442], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T23:16:32.806488Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439875461176759331:2444], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T23:16:32.807252Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439875461176759331:2444], row count: 2, finished: 1 2024-11-21T23:16:32.807295Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439875461176759331:2444], owner: [7:7439875461176759328:2442], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T23:16:32.810142Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230992802, txId: 281474976715681] shutting down 2024-11-21T23:16:32.884649Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:16:32.884678Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:32.913418Z node 9 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded top size# 5, time# 2024-11-21T23:16:32.913255Z 2024-11-21T23:16:32.913657Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxTopPartitions::Execute: partition count# 5 2024-11-21T23:16:32.917459Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxTopPartitions::Complete 2024-11-21T23:16:33.013859Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxCollect::Execute 2024-11-21T23:16:33.014071Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistPartitionTopResults: table id# 17, partition interval end# 2024-11-21T23:16:33.000000Z, partition count# 5 2024-11-21T23:16:33.014226Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] PersistPartitionTopResults: table id# 18, partition interval end# 2024-11-22T00:00:00.000000Z, partition count# 5 2024-11-21T23:16:33.014380Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] Reset: interval end# 2024-11-21T23:16:33.000000Z 2024-11-21T23:16:33.021318Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TTxCollect::Complete 2024-11-21T23:16:33.024394Z node 9 :SYSTEM_VIEWS DEBUG: [72075186224037899] TEvApplyCounters: services count# 1 2024-11-21T23:16:33.044825Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jd8g6mxhfjdyvzrzxptfqpg1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NWEyMzRkNC03YmNhM2MwZi05ZWMwOGUwZC02N2NmNjNhMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:16:33.051433Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439875465471726671:2456], owner: [7:7439875465471726667:2454], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T23:16:33.052166Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439875465471726671:2456], schemeshard id: 72075186224037888, hive id: 72057594037968897, tenant name: /Root/Tenant1, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], tenant node count: 2 2024-11-21T23:16:33.053068Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439875465471726671:2456], row count: 3, finished: 1 2024-11-21T23:16:33.053112Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439875465471726671:2456], owner: [7:7439875465471726667:2454], scan id: 0, table id: [72075186224037888:1:0:top_partitions_one_minute] 2024-11-21T23:16:33.052795Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reply batch: range# From { IntervalEndUs: 1732230991000000 Rank: 0 } InclusiveFrom: false To { IntervalEndUs: 1732230991000000 Rank: 3 } InclusiveTo: true Type: TOP_PARTITIONS_ONE_MINUTE , rows# 3, bytes# 229, next# 2024-11-21T23:16:33.056384Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230993041, txId: 281474976715683] shutting down 2024-11-21T23:16:33.071580Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2024-11-21T23:16:33.072046Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:33.082513Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7439875399275077060:2055], processor id# 72075186224037899, database# /Root/Tenant2 2024-11-21T23:16:33.087140Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxCollect::Execute 2024-11-21T23:16:33.086950Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T23:16:33.087415Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistPartitionTopResults: table id# 17, partition interval end# 2024-11-21T23:16:33.000000Z, partition count# 5 2024-11-21T23:16:33.087534Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistPartitionTopResults: table id# 18, partition interval end# 2024-11-22T00:00:00.000000Z, partition count# 5 2024-11-21T23:16:33.087868Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:33.086801Z node 8 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [8:7439875399275077060:2055], database# /Root/Tenant2, processor id# 72075186224037899 2024-11-21T23:16:33.094081Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2024-11-21T23:16:33.000000Z 2024-11-21T23:16:33.087987Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2024-11-21T23:16:33.092398Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7439875397201462578:2055], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T23:16:33.088352Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:33.092607Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439875397201462578:2055], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T23:16:33.091720Z node 7 :HIVE WARN: HIVE#72057594037968897 
THive::TTxStatus(status=2 node=Connected) - killing node 9 2024-11-21T23:16:33.092873Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:16:33.097227Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439875397556948262:2107], Type=268959746 2024-11-21T23:16:33.097276Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439875397556948262:2107], Type=268959746 2024-11-21T23:16:33.099012Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439875397556948262:2107], Type=268959746 2024-11-21T23:16:33.099059Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439875397556948262:2107], Type=268959746 2024-11-21T23:16:33.099087Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439875397556948262:2107], Type=268959746 2024-11-21T23:16:33.099114Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[9:7439875397556948262:2107], Type=268959746 2024-11-21T23:16:33.138755Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7439875397280701292:2055], processor id# 72075186224037893, database# /Root/Tenant1 2024-11-21T23:16:33.143276Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7439875397280701292:2055], database# /Root/Tenant1, processor id# 72075186224037893 2024-11-21T23:16:33.199927Z node 9 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [9:7439875397556948028:2055], processor id# 72075186224037899, database# /Root/Tenant2 2024-11-21T23:16:33.201190Z node 9 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [9:7439875397556948028:2055], database# /Root/Tenant2, processor id# 72075186224037899 2024-11-21T23:16:35.045067Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [8:7439875394980109694:2063] 2024-11-21T23:16:35.133651Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [10:7439875392906495230:2063] 2024-11-21T23:16:35.070493Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [8:7439875399275077060:2055] 2024-11-21T23:16:35.071964Z node 8 :SYSTEM_VIEWS DEBUG: Send counters: service id# [8:7439875399275077060:2055], processor id# 72075186224037899, database# /Root/Tenant2, generation# 16820039315459936540, node id# 8, is retrying# 0, is labeled# 0 2024-11-21T23:16:35.184617Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7439875399275077060:2055], processor id# 72075186224037899, database# /Root/Tenant2 2024-11-21T23:16:35.184910Z node 8 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [8:7439875399275077060:2055], database# /Root/Tenant2, processor id# 72075186224037899 2024-11-21T23:16:35.212209Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [8:7439875399275077060:2055] 2024-11-21T23:16:35.292278Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [8:7439875394980109694:2063] 2024-11-21T23:16:35.310991Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [10:7439875397201462578:2055] 2024-11-21T23:16:36.001128Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [8:7439875399275077060:2055], interval end# 2024-11-21T23:16:36.000000Z, event interval end# 2024-11-21T23:16:36.000000Z 2024-11-21T23:16:36.001164Z node 8 
:SYSTEM_VIEWS DEBUG: Rotate logs: service id# [8:7439875399275077060:2055], query logs count# 0, processor ids count# 1, processor id to database count# 1 2024-11-21T23:16:36.002526Z node 8 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [8:7439875394980109694:2063], interval end# 2024-11-21T23:16:36.000000Z, event interval end# 2024-11-21T23:16:36.000000Z 2024-11-21T23:16:36.002553Z node 8 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [8:7439875394980109694:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2024-11-21T23:16:39.160583Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.167058Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.169017Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.171945Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.172126Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.172627Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fc5/r3tmp/tmpQFXLBo/pdisk_1.dat 2024-11-21T23:16:39.817904Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:535:2457] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1294:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:39.818062Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.818103Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.818128Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.818152Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.818176Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.818200Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 
2024-11-21T23:16:39.818236Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:1294:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:39.818300Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1294:1] Marker# BPG33 2024-11-21T23:16:39.818340Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1294:1] Marker# BPG32 2024-11-21T23:16:39.818380Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1294:2] Marker# BPG33 2024-11-21T23:16:39.818431Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1294:2] Marker# BPG32 2024-11-21T23:16:39.818460Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1294:3] Marker# BPG33 2024-11-21T23:16:39.818484Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1294:3] Marker# BPG32 2024-11-21T23:16:39.818643Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:62:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:3] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:39.818716Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:55:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:2] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:39.818755Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:76:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:1] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:39.831240Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:39.831619Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:16:39.831750Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:39.831834Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:39.831904Z node 1 :BS_PROXY_PUT INFO: 
[185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:39.903924Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] bootstrap ActorId# [1:579:2494] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:222:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:39.904109Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.904152Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.904178Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.904203Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.904229Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.904254Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.904287Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] restore Id# [72057594037932033:2:8:0:0:222:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:39.904357Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG33 2024-11-21T23:16:39.904401Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG32 2024-11-21T23:16:39.904438Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG33 2024-11-21T23:16:39.904466Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG32 2024-11-21T23:16:39.904497Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG33 2024-11-21T23:16:39.904520Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG32 2024-11-21T23:16:39.904670Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:62:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:3] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:39.904732Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:55:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:2] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:39.904776Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:76:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:1] FDS# 222 HandleClass# TabletLog {MsgQoS 
ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:39.907888Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:39.908134Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:16:39.908258Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:39.908343Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:39.908414Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 Sending TEvPut 2024-11-21T23:16:39.909552Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:16:39.909601Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T23:1 ... 
lse Marker# DSP02 2024-11-21T23:16:39.951926Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:16:39.955772Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:580:2495] Create Queue# [1:584:2498] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.955915Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:580:2495] Create Queue# [1:585:2499] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.956022Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:580:2495] Create Queue# [1:586:2500] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.956126Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:580:2495] Create Queue# [1:587:2501] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.956248Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:580:2495] Create Queue# [1:588:2502] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.956363Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:580:2495] Create Queue# [1:589:2503] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.956466Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:580:2495] Create Queue# [1:590:2504] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.956492Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:16:39.957294Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.957418Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.957539Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.957651Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.957858Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.957981Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 
GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.958038Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.958066Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T23:16:39.958097Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T23:16:39.958144Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2024-11-21T23:16:39.961729Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:593:2505] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:39.961880Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:39.961926Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:39.961980Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2024-11-21T23:16:39.962017Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2024-11-21T23:16:39.962149Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:584:2498] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:39.972334Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2024-11-21T23:16:39.972453Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2024-11-21T23:16:39.972505Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:39.973026Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:16:39.973083Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T23:16:39.973222Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# 
DSP17 2024-11-21T23:16:39.973747Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:207} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/dl5p/001fc5/r3tmp/tmpQFXLBo//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2024-11-21T23:16:39.974609Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2024-11-21T23:16:39.974664Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:16:39.977037Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:597:2104] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.977429Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:598:2105] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.977594Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:599:2106] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.977714Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:600:2107] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.977843Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:601:2108] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.977963Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:602:2109] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.978081Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:595:2103] Create Queue# [2:603:2110] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:39.978110Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:16:39.979873Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.980004Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.980185Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.980403Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.980474Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 
127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.980533Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.980774Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:39.980809Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T23:16:39.980845Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T23:16:39.981048Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:597:2104] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2024-11-21T23:14:37.694906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439874969952670195:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:37.695145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpFiboWk/pdisk_1.dat 2024-11-21T23:14:38.790736Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:14:38.955432Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:38.955660Z node 1 :HTTP ERROR: (#27,[::1]:27702) connection closed with error: Connection refused 2024-11-21T23:14:38.981528Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:14:38.984719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:38.984789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:38.996175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:44.180171Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439874997759617925:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:44.180209Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpdvcRnW/pdisk_1.dat 2024-11-21T23:14:44.572384Z node 2 :HTTP ERROR: (#32,[::1]:4989) 
connection closed with error: Connection refused 2024-11-21T23:14:44.578731Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:14:44.578877Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:44.607451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:44.607560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:44.609017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:50.630569Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875025923484160:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:50.653012Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpjBTgRL/pdisk_1.dat 2024-11-21T23:14:50.897338Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:50.919141Z node 3 :HTTP ERROR: (#30,[::1]:12097) connection closed with error: Connection refused 2024-11-21T23:14:50.926977Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:14:50.927837Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:50.927920Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:50.932618Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:55.547791Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439875046301797522:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpr90JkK/pdisk_1.dat 2024-11-21T23:14:55.690588Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:14:55.787030Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:55.813460Z node 4 :HTTP ERROR: (#32,[::1]:16730) connection closed with error: Connection refused 2024-11-21T23:14:55.818739Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:14:55.820025Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:55.820110Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:55.827657Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:14:59.590709Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875060407165044:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:14:59.590796Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpugiphD/pdisk_1.dat 2024-11-21T23:14:59.762676Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:59.772790Z node 5 :HTTP ERROR: (#34,[::1]:9520) connection closed with error: Connection refused 2024-11-21T23:14:59.773216Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:14:59.777502Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:14:59.777578Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:14:59.781878Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:03.157300Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439875079192730775:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:03.157380Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmppcAmdp/pdisk_1.dat 2024-11-21T23:15:03.253280Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:03.278268Z node 6 :HTTP ERROR: (#33,[::1]:27498) connection closed with error: Connection refused 2024-11-21T23:15:03.278719Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:15:03.293162Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:03.293259Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:03.294994Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:06.761604Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7439875091356499224:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:06.761676Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpQiCPeP/pdisk_1.dat 2024-11-21T23:15:06.852733Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:06.867156Z node 7 :HTTP ERROR: (#38,[::1]:4137) connection closed with error: Connection refused 2024-11-21T23:15:06.867423Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:15:06.892960Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:06.893059Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:06.894228Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:10.316759Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439875107577641738:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:10.316834Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmp6RShpF/pdisk_1.dat 2024-11-21T23:15:10.419333Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:10.441596Z node 8 :HTTP ERROR: (#40,[::1]:61492) connection closed with error: Connection refused 2024-11-21T23:15:10.442826Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:15:10.445655Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:10.445760Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:10.447398Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:14.214076Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7439875125986679645:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:14.214161Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpqFVcEv/pdisk_1.dat 2024-11-21T23:15:14.327182Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:14.366891Z node 9 :HTTP ERROR: (#42,[::1]:29510) connection closed with error: Connection refused 2024-11-21T23:15:14.367826Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:15:14.369833Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:14.369919Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:14.372359Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:18.149877Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7439875145076660179:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:18.149961Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpGACdfl/pdisk_1.dat 2024-11-21T23:15:18.227333Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:18.251357Z node 10 :HTTP ERROR: (#30,[::1]:27924) connection closed with error: Connection refused ... 
cheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpMszWeL/pdisk_1.dat 2024-11-21T23:15:35.107546Z node 14 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:35.136304Z node 14 :HTTP ERROR: (#38,[::1]:13712) connection closed with error: Connection refused 2024-11-21T23:15:35.136758Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:15:35.139188Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:35.139298Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:35.141501Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:42.052432Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7439875248932303857:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:42.052501Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpo3iz27/pdisk_1.dat 2024-11-21T23:15:42.247957Z node 15 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:42.267680Z node 15 :HTTP ERROR: (#40,[::1]:32681) connection closed with error: Connection refused 2024-11-21T23:15:42.269022Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:15:42.286957Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:42.287087Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:42.290579Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:49.275717Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7439875277326724667:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpcJYa4M/pdisk_1.dat 2024-11-21T23:15:49.426117Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:49.538029Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:49.540496Z node 16 :HTTP ERROR: (#42,[::1]:6063) connection closed with error: Connection refused 2024-11-21T23:15:49.540566Z node 16 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:15:49.543724Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:49.543897Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:49.545717Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:54.951384Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7439875300230484986:2052];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T23:15:54.951470Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpHsZUsa/pdisk_1.dat 2024-11-21T23:15:55.232361Z node 17 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:55.232498Z node 17 :HTTP ERROR: (#30,[::1]:29809) connection closed with error: Connection refused 2024-11-21T23:15:55.242880Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:15:55.252859Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:55.252993Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:55.255365Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:00.872597Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7439875324690356062:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:00.872668Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpIMvfPE/pdisk_1.dat 2024-11-21T23:16:01.142380Z node 18 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:01.157403Z node 18 :HTTP ERROR: (#32,[::1]:23704) connection closed with error: Connection refused 2024-11-21T23:16:01.158456Z node 18 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:16:01.181166Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:01.181291Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:01.182911Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:07.374963Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7439875356071110747:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:07.375009Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpuZMm5q/pdisk_1.dat 2024-11-21T23:16:07.655408Z node 19 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:07.667989Z node 19 :HTTP ERROR: (#34,[::1]:19963) connection closed with error: Connection refused 2024-11-21T23:16:07.673338Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:16:07.686470Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:07.686627Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:07.688107Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:13.628085Z node 20 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7439875381229059574:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:13.628958Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmppfiuGp/pdisk_1.dat 2024-11-21T23:16:13.800119Z node 20 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:13.827750Z node 20 :HTTP ERROR: (#36,[::1]:12617) connection closed with error: Connection refused 2024-11-21T23:16:13.828437Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:16:13.848758Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:13.848890Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:13.850955Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:19.674813Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7439875404159269938:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:19.674906Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpAr1H6n/pdisk_1.dat 2024-11-21T23:16:19.889086Z node 21 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:19.909417Z node 21 :HTTP ERROR: (#38,[::1]:23681) connection closed with error: Connection refused 2024-11-21T23:16:19.909873Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:16:19.914572Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:19.914707Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:19.917143Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:25.794950Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7439875430532594749:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:25.795031Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmpy8opiU/pdisk_1.dat 2024-11-21T23:16:26.003538Z node 22 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:26.026890Z node 22 :HTTP ERROR: (#40,[::1]:1534) connection closed with error: Connection refused 2024-11-21T23:16:26.027392Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:16:26.028763Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:26.028890Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2024-11-21T23:16:26.031749Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:31.729169Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7439875456659405751:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:31.729251Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002390/r3tmp/tmp3eCXSz/pdisk_1.dat 2024-11-21T23:16:31.893848Z node 23 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:31.926325Z node 23 :HTTP ERROR: (#42,[::1]:8085) connection closed with error: Connection refused 2024-11-21T23:16:31.926781Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2024-11-21T23:16:31.929108Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:31.929242Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:31.931133Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: 2024-11-21T23:16:39.580301Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.585114Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.586645Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.589149Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.589287Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:39.589680Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fc0/r3tmp/tmpXgOWuH/pdisk_1.dat Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2024-11-21T23:16:40.239257Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/001fc0/r3tmp/tmpoAP9ES//new_pdisk.dat": no such file. 
PDiskId# 1001 2024-11-21T23:16:40.239801Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/001fc0/r3tmp/tmpoAP9ES//new_pdisk.dat": no such file. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/001fc0/r3tmp/tmpoAP9ES//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10384135716728540695 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1001 2024-11-21T23:16:40.260791Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:534:2456] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:356:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:40.260920Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:356:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:40.260952Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:356:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:40.260970Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:356:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:40.260988Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:356:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:40.261010Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:356:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:40.261026Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:356:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:40.261052Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:356:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:40.261100Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:356:1] Marker# BPG33 2024-11-21T23:16:40.261131Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob 
Id# [72057594037932033:2:7:0:0:356:1] Marker# BPG32 2024-11-21T23:16:40.261180Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:356:2] Marker# BPG33 2024-11-21T23:16:40.261197Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:356:2] Marker# BPG32 2024-11-21T23:16:40.261215Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:356:3] Marker# BPG33 2024-11-21T23:16:40.261230Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:356:3] Marker# BPG32 2024-11-21T23:16:40.261333Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:62:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:356:3] FDS# 356 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:40.261374Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:55:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:356:2] FDS# 356 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:40.261402Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:76:2103] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:356:1] FDS# 356 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:40.272633Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:356:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:40.272832Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:356:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:16:40.272911Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:356:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 82803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:40.272977Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:356:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:40.273027Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:356:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 |85.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |85.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks >> TStateStorageTest::ShouldDeleteNoCheckpoints >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] >> Viewer::FloatPointJsonQuery >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams >> TCheckpointStorageTest::ShouldCreateCheckpoint |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |85.8%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2024-11-21T23:16:42.122329Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:42.123833Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:42.123931Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:42.125403Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:42.126546Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:42.126635Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001fbc/r3tmp/tmpo0sdB7/pdisk_1.dat 2024-11-21T23:16:42.753041Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:471:2455] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1299:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:42.753230Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1299:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.753271Z 
node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1299:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.753295Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1299:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.753317Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1299:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.753339Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1299:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.753364Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1299:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.753395Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:1299:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:42.753467Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1299:1] Marker# BPG33 2024-11-21T23:16:42.753514Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1299:1] Marker# BPG32 2024-11-21T23:16:42.753551Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1299:2] Marker# BPG33 2024-11-21T23:16:42.753575Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1299:2] Marker# BPG32 2024-11-21T23:16:42.753604Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1299:3] Marker# BPG33 2024-11-21T23:16:42.753629Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1299:3] Marker# BPG32 2024-11-21T23:16:42.753788Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1299:3] FDS# 1299 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.753850Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1299:2] FDS# 1299 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.753891Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1299:1] FDS# 1299 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.767050Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1299:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90228 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:42.767305Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1299:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90228 ExtQueueId# 
PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:16:42.767403Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1299:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90228 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:42.767489Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1299:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:42.767547Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1299:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:42.840574Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] bootstrap ActorId# [1:515:2492] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:222:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:42.840758Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.840796Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.840818Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.840844Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.840866Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.840884Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Id# [72057594037932033:2:8:0:0:222:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.840917Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] restore Id# [72057594037932033:2:8:0:0:222:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:42.840991Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG33 2024-11-21T23:16:42.841031Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:222:1] Marker# BPG32 2024-11-21T23:16:42.841071Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG33 2024-11-21T23:16:42.841097Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:222:2] Marker# BPG32 2024-11-21T23:16:42.841138Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] 
Marker# BPG33 2024-11-21T23:16:42.841162Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:222:3] Marker# BPG32 2024-11-21T23:16:42.841331Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:3] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.841421Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:2] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.841464Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:222:1] FDS# 222 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.848841Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:42.849050Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:16:42.849135Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:222:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 81748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:42.849210Z node 1 :BS_PROXY_PUT DEBUG: [c8d415ebd9884d79] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:42.849274Z node 1 :BS_PROXY_PUT INFO: [c8d415ebd9884d79] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:222:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:42.850304Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:16:42.850349Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2024-11-21T23:16:42.850447Z nod ... 
] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.884805Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:9:0:0:241:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.884827Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:9:0:0:241:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:42.884857Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:9:0:0:241:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:42.884945Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:241:1] Marker# BPG33 2024-11-21T23:16:42.884999Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:241:1] Marker# BPG32 2024-11-21T23:16:42.885037Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:241:2] Marker# BPG33 2024-11-21T23:16:42.885063Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:241:2] Marker# BPG32 2024-11-21T23:16:42.885091Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:241:3] Marker# BPG33 2024-11-21T23:16:42.885116Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:241:3] Marker# BPG32 2024-11-21T23:16:42.885270Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:3] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.885337Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:2] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.885380Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:51:2095] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:241:1] FDS# 241 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:42.889066Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:42.889348Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2024-11-21T23:16:42.889471Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:241:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81897 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed 
ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:42.889568Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:9:0:0:241:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:42.889652Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:9:0:0:241:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:42.891954Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2024-11-21T23:16:42.892001Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2024-11-21T23:16:42.893749Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:520:2496] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:42.893871Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:521:2497] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:42.893962Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:522:2498] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:42.894066Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:523:2499] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:42.894176Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:524:2500] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:42.894676Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:525:2501] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:42.894784Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:516:2493] Create Queue# [1:526:2502] targetNodeId# 1 Marker# DSP01 2024-11-21T23:16:42.894816Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2024-11-21T23:16:42.895673Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:42.895785Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:42.895955Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:42.896058Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 
MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:42.896238Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:42.896293Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:42.896365Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524256 GType# none}} Duration# 0.000000s Marker# DSP04 2024-11-21T23:16:42.896388Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2024-11-21T23:16:42.896416Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2024-11-21T23:16:42.896545Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] bootstrap ActorId# [1:529:2503] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T23:16:42.896601Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T23:16:42.896808Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 10784142398753829489 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T23:16:42.906980Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T23:16:42.907063Z node 1 :BS_PROXY_BLOCK DEBUG: [5a9a1d6240d04444] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2024-11-21T23:16:42.907435Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2024-11-21T23:16:42.909451Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2024-11-21T23:16:42.910147Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] bootstrap ActorId# [1:531:2505] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T23:16:42.910207Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T23:16:42.910381Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 16702343236371012445 MsgQoS { 
ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T23:16:42.911495Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T23:16:42.911575Z node 1 :BS_PROXY_BLOCK DEBUG: [abc2fc901918ac71] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2024-11-21T23:16:42.912022Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] bootstrap ActorId# [1:532:2506] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2024-11-21T23:16:42.912086Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2024-11-21T23:16:42.912292Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:520:2496] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 304681040183600663 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2024-11-21T23:16:42.913162Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2024-11-21T23:16:42.913222Z node 1 :BS_PROXY_BLOCK DEBUG: [3ca1a99c83a6f037] Result# TEvBlockResult {Status# OK} Marker# DSPB04 >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency >> KqpIndexes::UpsertWithNullKeysSimple >> KqpUniqueIndex::UpdateFkSameValue |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 >> TBlobStorageWardenTest::TestSendToInvalidGroupId |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |85.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> BsControllerConfig::MergeBoxes [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD] >> TStorageServiceTest::ShouldCreateCheckpoint >> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD] >> TCheckpointStorageTest::ShouldCreateGetCheckpoints >> TStorageServiceTest::ShouldNotRegisterPrevGeneration >> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD] >> Viewer::Plan2SvgBad [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TPersQueueMirrorer::TestBasicRemote >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TCheckpointStorageTest::ShouldRegisterCoordinator >> KqpIndexes::UniqAndNoUniqSecondaryIndex >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:10914:2156] recipient: [1:10713:2165] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:10914:2156] recipient: [1:10713:2165] Leader for 
TabletID 72057594037932033 is [1:10916:2167] sender: [1:10917:2156] recipient: [1:10713:2165] 2024-11-21T23:15:26.727663Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:26.732927Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:26.736109Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:26.736619Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2024-11-21T23:15:26.737480Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:1977} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:26.737511Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:283} Handle TEvInterconnect::TEvNodesInfo 2024-11-21T23:15:26.737823Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2024-11-21T23:15:26.747162Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2024-11-21T23:15:26.747298Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:182} Execute tx 2024-11-21T23:15:26.747429Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:239} Complete tx IncompatibleData# false 2024-11-21T23:15:26.747565Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:26.747673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2024-11-21T23:15:26.747776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:10916:2167] sender: [1:10942:2156] recipient: [1:110:2157] 2024-11-21T23:15:26.761936Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2024-11-21T23:15:26.762083Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:26.773079Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2024-11-21T23:15:26.773234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:26.773338Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2024-11-21T23:15:26.773432Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:26.773536Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2024-11-21T23:15:26.773604Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx 
from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:26.773639Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2024-11-21T23:15:26.773695Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:26.786062Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2024-11-21T23:15:26.786626Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:15} TTxLoadEverything Execute 2024-11-21T23:15:26.791944Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:511} TTxLoadEverything Complete 2024-11-21T23:15:26.792019Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2157} LoadFinished 2024-11-21T23:15:26.792744Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:513} TTxLoadEverything InitQueue processed 2024-11-21T23:15:26.807765Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 
} HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } 
HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12097 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12098 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12099 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12100 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 800 PDiskFilter { Property { Type: ROT } } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 800 PDiskFilter { Property { Type: SSD } } } } Command { QueryBaseConfig { ... 5} Create new pdisk PDiskId# 295:1001 Path# /dev/disk1 2024-11-21T23:16:35.747460Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1000 Path# /dev/disk2 2024-11-21T23:16:35.747492Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 293:1002 Path# /dev/disk2 2024-11-21T23:16:35.747707Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1001 Path# /dev/disk1 2024-11-21T23:16:35.747770Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1000 Path# /dev/disk3 2024-11-21T23:16:35.747804Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1000 Path# /dev/disk3 2024-11-21T23:16:35.747837Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2024-11-21T23:16:35.747890Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2024-11-21T23:16:35.747930Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 290:1002 Path# /dev/disk1 2024-11-21T23:16:35.747961Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1000 Path# /dev/disk3 2024-11-21T23:16:35.748006Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2024-11-21T23:16:35.748047Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1001 Path# /dev/disk1 2024-11-21T23:16:35.748078Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1000 Path# /dev/disk3 2024-11-21T23:16:35.748109Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 291:1002 Path# /dev/disk1 2024-11-21T23:16:35.748155Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2024-11-21T23:16:35.748191Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 289:1002 Path# /dev/disk2 2024-11-21T23:16:35.748222Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 288:1002 Path# 
/dev/disk1 2024-11-21T23:16:35.748252Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1000 Path# /dev/disk3 2024-11-21T23:16:35.748286Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2024-11-21T23:16:35.748317Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2024-11-21T23:16:35.748346Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 287:1002 Path# /dev/disk1 2024-11-21T23:16:35.748373Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1000 Path# /dev/disk3 2024-11-21T23:16:35.748418Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2024-11-21T23:16:35.748455Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1001 Path# /dev/disk1 2024-11-21T23:16:35.748492Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1000 Path# /dev/disk3 2024-11-21T23:16:35.748520Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2024-11-21T23:16:35.748564Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2024-11-21T23:16:35.748601Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 285:1002 Path# /dev/disk1 2024-11-21T23:16:35.748630Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1000 Path# /dev/disk3 2024-11-21T23:16:35.748658Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2024-11-21T23:16:35.748687Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2024-11-21T23:16:35.748716Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 284:1002 Path# /dev/disk1 2024-11-21T23:16:35.748754Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1000 Path# /dev/disk3 2024-11-21T23:16:35.748783Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2024-11-21T23:16:35.748816Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2024-11-21T23:16:35.748895Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 283:1002 Path# /dev/disk1 2024-11-21T23:16:35.748941Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1000 Path# /dev/disk3 2024-11-21T23:16:35.748981Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1000 Path# /dev/disk3 2024-11-21T23:16:35.749013Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2024-11-21T23:16:35.749046Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 282:1002 Path# /dev/disk1 2024-11-21T23:16:35.749077Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1000 Path# 
/dev/disk3 2024-11-21T23:16:35.749109Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2024-11-21T23:16:35.749152Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2024-11-21T23:16:35.749189Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 281:1002 Path# /dev/disk1 2024-11-21T23:16:35.749218Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1001 Path# /dev/disk2 2024-11-21T23:16:35.749247Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1000 Path# /dev/disk3 2024-11-21T23:16:35.749276Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2024-11-21T23:16:35.749310Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2024-11-21T23:16:35.749339Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 280:1002 Path# /dev/disk1 2024-11-21T23:16:35.749380Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1000 Path# /dev/disk3 2024-11-21T23:16:35.749429Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2024-11-21T23:16:35.749465Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 279:1002 Path# /dev/disk1 2024-11-21T23:16:35.749507Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1000 Path# /dev/disk3 2024-11-21T23:16:35.749543Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2024-11-21T23:16:35.749574Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1000 Path# /dev/disk1 2024-11-21T23:16:35.749604Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1000 Path# /dev/disk3 2024-11-21T23:16:35.749652Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2024-11-21T23:16:35.749684Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1000 Path# /dev/disk3 2024-11-21T23:16:35.749726Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1000 Path# /dev/disk1 2024-11-21T23:16:35.749792Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1000 Path# /dev/disk3 2024-11-21T23:16:35.749842Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1001 Path# /dev/disk2 2024-11-21T23:16:35.749877Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1000 Path# /dev/disk1 2024-11-21T23:16:35.749928Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1001 Path# /dev/disk3 2024-11-21T23:16:35.749961Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 275:1002 Path# /dev/disk1 2024-11-21T23:16:35.750012Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1001 Path# 
/dev/disk3 2024-11-21T23:16:35.750043Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1001 Path# /dev/disk3 2024-11-21T23:16:35.750089Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 278:1002 Path# /dev/disk1 2024-11-21T23:16:35.750121Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 274:1002 Path# /dev/disk2 2024-11-21T23:16:35.750153Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2024-11-21T23:16:35.750198Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 277:1002 Path# /dev/disk1 2024-11-21T23:16:35.750230Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 273:1002 Path# /dev/disk2 2024-11-21T23:16:35.750271Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1001 Path# /dev/disk1 2024-11-21T23:16:35.750325Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 286:1002 Path# /dev/disk2 2024-11-21T23:16:35.750363Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 271:1002 Path# /dev/disk1 2024-11-21T23:16:35.750428Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 272:1002 Path# /dev/disk2 2024-11-21T23:16:35.750483Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:355} Create new pdisk PDiskId# 276:1002 Path# /dev/disk2 2024-11-21T23:16:35.972794Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.228402s 2024-11-21T23:16:35.972973Z node 251 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.228608s 2024-11-21T23:16:35.987984Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2024-11-21T23:16:36.008346Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:395} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumns >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2024-11-21T23:16:45.186599Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.187838Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.187928Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.189434Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: 
Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.190904Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.191008Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001f73/r3tmp/tmpOoMVeD/pdisk_1.dat 2024-11-21T23:16:45.820172Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] bootstrap ActorId# [1:471:2455] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:7:0:0:1294:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2024-11-21T23:16:45.820355Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:45.820403Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:45.820432Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:45.820459Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:45.820488Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:45.820513Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Id# [72057594037932033:2:7:0:0:1294:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2024-11-21T23:16:45.820548Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] restore Id# [72057594037932033:2:7:0:0:1294:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2024-11-21T23:16:45.820621Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:7:0:0:1294:1] Marker# BPG33 2024-11-21T23:16:45.820665Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:7:0:0:1294:1] Marker# BPG32 2024-11-21T23:16:45.820710Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:7:0:0:1294:2] Marker# BPG33 2024-11-21T23:16:45.820736Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:7:0:0:1294:2] Marker# BPG32 2024-11-21T23:16:45.820766Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:7:0:0:1294:3] Marker# BPG33 2024-11-21T23:16:45.820794Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:7:0:0:1294:3] Marker# BPG32 2024-11-21T23:16:45.820976Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:44:2088] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:3] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} 
DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:45.821045Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:37:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:2] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:45.821093Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2102] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:7:0:0:1294:1] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2024-11-21T23:16:45.836651Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2024-11-21T23:16:45.836887Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2024-11-21T23:16:45.836983Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037932033:2:7:0:0:1294:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 7 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 8 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2024-11-21T23:16:45.837063Z node 1 :BS_PROXY_PUT DEBUG: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037932033:2:7:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2024-11-21T23:16:45.837136Z node 1 :BS_PROXY_PUT INFO: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:7:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2024-11-21T23:16:45.866637Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2024-11-21T23:16:45.868635Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2024-11-21T23:16:45.868922Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2024-11-21T23:16:45.869062Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2024-11-21T23:16:44.163026Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:44.164256Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:44.164366Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:44.165921Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:44.167078Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:44.167155Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001f8b/r3tmp/tmpXzGsfJ/pdisk_1.dat 2024-11-21T23:16:45.498753Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.501874Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.502164Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.503694Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.503777Z node 2 :BS_SYNCLOG WARN: 
VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.505024Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2024-11-21T23:16:45.505119Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001f8b/r3tmp/tmpafEutA/pdisk_1.dat |85.9%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} >> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: 2024-11-21T23:16:19.610571Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875404083548810:2074];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:19.610632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:16:20.292375Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:20.325937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:20.326032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:20.328570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10870, node 1 2024-11-21T23:16:20.448408Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:20.448451Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:20.448463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:20.448616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:16:20.881754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:20.908154Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:16:20.936833Z node 1 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T23:16:23.443530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875421263418645:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:23.443630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:23.444239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875421263418657:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:23.450080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:16:23.462754Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:16:23.463020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875421263418659:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:16:23.974506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:16:24.611673Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875404083548810:2074];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:24.611742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:28.450862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2024-11-21T23:16:28.452788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 2024-11-21T23:16:28.454073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2024-11-21T23:16:31.273177Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T23:16:31.273487Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439875455623158391:2667] TxId: 281474976710720. Ctx: { TraceId: 01jd8g6k2n1fs2yb7ejktkjcz7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgzYmU4Y2QtNDRhOTcyYTEtNWZlMGJiNjEtNWFkYTFkOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T23:16:31.306534Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439875455623158398:2672], TxId: 281474976710720, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MTgzYmU4Y2QtNDRhOTcyYTEtNWZlMGJiNjEtNWFkYTFkOWY=. TraceId : 01jd8g6k2n1fs2yb7ejktkjcz7. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439875455623158391:2667], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T23:16:31.307869Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTgzYmU4Y2QtNDRhOTcyYTEtNWZlMGJiNjEtNWFkYTFkOWY=, ActorId: [1:7439875451328191072:2667], ActorState: ExecuteState, TraceId: 01jd8g6k2n1fs2yb7ejktkjcz7, Create QueryResponse for error on request, msg: 2024-11-21T23:16:31.308384Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732230991232, txId: 281474976710719] shutting down 2024-11-21T23:16:33.250465Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875464391157907:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:33.250577Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:16:33.406248Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:33.426739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:33.426842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:33.431051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13197, node 2 2024-11-21T23:16:33.501285Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:33.501328Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:33.501341Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:33.501515Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12157 2024-11-21T23:16:33.844393Z node 2 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T23:16:39.304097Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875490308245742:2082];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:39.304862Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:16:39.570622Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:39.589244Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:39.589347Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:39.590855Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29007, node 3 2024-11-21T23:16:39.671069Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:39.671099Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:39.671115Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:39.671291Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:9814 2024-11-21T23:16:40.027342Z node 3 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |85.9%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |85.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |85.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit |85.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints |85.9%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteScheme >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints >> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators >> TSchemeShardSubDomainTest::RestartAtInFly >> TStateStorageTest::ShouldLoadLastSnapshot [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered >> TStorageServiceTest::ShouldUseGc [FAIL] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |86.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TStorageServiceTest::ShouldCreateCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction >> TCheckpointStorageTest::ShouldGetCoordinators [GOOD] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD] >> TStateStorageTest::ShouldLoadIncrementSnapshot >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:49.708425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:49.708514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:49.708563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:49.708605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:49.708653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:49.708693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:49.708749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:49.709115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:49.799133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:49.799196Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:49.828764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:49.832782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:49.833608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:49.861753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:49.865559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:49.866270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:49.870861Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:49.880165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:49.881710Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:49.881776Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:49.881990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:49.882036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:49.882076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:49.882214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:49.890264Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:50.138960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:50.139233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.139491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:50.139769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:50.139840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.143532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:50.143719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:50.143947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.144040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:50.144087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:50.144126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:50.155605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.155688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:50.155732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:50.159637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.159704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.159748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:50.159818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:50.163754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:50.175678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:50.175955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:50.177069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:50.177242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:50.177294Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:50.177632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:50.177703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:50.177907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:50.178014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:50.183764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:50.183835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:50.184067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:50.184118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2024-11-21T23:16:50.184594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.184649Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:50.184776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:50.184810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:50.184881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:50.184950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:50.184990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:50.185020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:50.185109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:50.185165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:50.185208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:50.187763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:50.187897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:50.188040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:50.188100Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:50.188144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:50.188253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
72057594046678944 is [1:121:2147] sender: [1:461:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:462:2058] recipient: [1:460:2413] Leader for TabletID 72057594046678944 is [1:463:2414] sender: [1:464:2058] recipient: [1:460:2413] 2024-11-21T23:16:50.513948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:50.514062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:50.514105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:50.514168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:50.514208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:50.514238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:50.514312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:50.514788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:50.552054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:50.553302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:50.553456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:50.553526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:50.553577Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:50.554346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:50.555167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2024-11-21T23:16:50.555279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:16:50.555374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.555438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.555724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2024-11-21T23:16:50.555997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.556238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.556346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.556470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 
2024-11-21T23:16:50.556556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:16:50.556608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:16:50.556628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:16:50.556729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.556814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.557057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2024-11-21T23:16:50.557411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.557525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.557943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.558022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.558250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.558340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.559140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.559375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.559503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.559668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.559891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.560006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.560071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.560135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.566959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:50.567014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:50.567731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:50.567774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:50.567815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2024-11-21T23:16:50.569319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2024-11-21T23:16:50.618924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2024-11-21T23:16:50.618993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:463:2414] sender: [1:524:2058] recipient: [1:15:2062] 2024-11-21T23:16:50.619789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2024-11-21T23:16:50.619917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2024-11-21T23:16:50.619957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:522:2461] TestWaitNotification: OK eventTxId 100 2024-11-21T23:16:50.620431Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:50.620633Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 237us result status StatusSuccess 2024-11-21T23:16:50.621048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:50.621563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:50.621735Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 147us result status StatusSuccess 2024-11-21T23:16:50.622055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ReadOnlyVDisk::TestStorageLoad >> TStateStorageTest::ShouldDeleteCheckpoints [GOOD] |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |86.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> TStateStorageTest::ShouldDeleteGraph ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:50.221493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:50.221582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:50.221634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:50.221673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:50.221717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:50.221748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:50.221814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:50.222165Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2024-11-21T23:16:50.300744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:50.300808Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:50.341967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:50.351612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:50.351831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:50.388679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:50.389850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:50.390605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:50.391010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:50.409885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:50.411582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:50.411659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:50.411948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:50.412017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:50.412069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:50.412178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.448213Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:50.775814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:50.776107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.776396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:50.777084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:50.777163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.780228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:50.780364Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:50.780590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.780667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:50.780715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:50.780748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:50.783366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.783443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:50.783514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:50.786123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.786173Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.786215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:50.786282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:50.793101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:50.796565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:50.796861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:50.797974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:50.798135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:50.798183Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:50.798587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:50.798662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:50.798863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:50.798975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:50.802089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:50.802147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:50.802361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:50.802424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:50.802816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:50.802874Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:50.802981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:50.803016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:50.803070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:50.803135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:50.803179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:50.803211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:50.803290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:50.803329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:50.803363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:50.805857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:50.805996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:50.806045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:50.806104Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:50.806146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:50.806259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
meshard# 72057594046678944 2024-11-21T23:16:51.057405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409551, partId: 0 2024-11-21T23:16:51.057490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551 2024-11-21T23:16:51.057528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2024-11-21T23:16:51.057563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId#101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2024-11-21T23:16:51.057605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2024-11-21T23:16:51.075971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:51.077526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:51.077665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:51.077801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:51.077844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:51.077902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet 72057594046678944 2024-11-21T23:16:51.077953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2024-11-21T23:16:51.078118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:51.080593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2024-11-21T23:16:51.080783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2024-11-21T23:16:51.081140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:51.081322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:51.081372Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T23:16:51.081736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 
128 -> 240 2024-11-21T23:16:51.081797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet 72057594046678944 2024-11-21T23:16:51.081974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:51.082048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T23:16:51.082116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2024-11-21T23:16:51.092050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:51.092130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:51.092346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:16:51.092472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:51.092516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2024-11-21T23:16:51.092562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2024-11-21T23:16:51.092866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2024-11-21T23:16:51.092917Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2024-11-21T23:16:51.093020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2024-11-21T23:16:51.093065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:16:51.093147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2024-11-21T23:16:51.093217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2024-11-21T23:16:51.093261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2024-11-21T23:16:51.093292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2024-11-21T23:16:51.093564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2024-11-21T23:16:51.093614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2024-11-21T23:16:51.093652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2024-11-21T23:16:51.093685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2024-11-21T23:16:51.112719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2024-11-21T23:16:51.112895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:51.112945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:16:51.112986Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2024-11-21T23:16:51.113027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:51.113962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:51.114060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2024-11-21T23:16:51.114093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2024-11-21T23:16:51.114121Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2024-11-21T23:16:51.114146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2024-11-21T23:16:51.114208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2024-11-21T23:16:51.114251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:559:2469] 2024-11-21T23:16:51.131474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:16:51.131659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2024-11-21T23:16:51.131711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2024-11-21T23:16:51.131752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:560:2470] TestWaitNotification: OK eventTxId 101 2024-11-21T23:16:51.132294Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2024-11-21T23:16:51.132543Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 240us result status StatusSuccess 2024-11-21T23:16:51.133002Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix4 >> ReadOnlyVDisk::TestDiscover >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex >> TStateStorageTest::ShouldDeleteGraph [GOOD] |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |86.0%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> TStateStorageTest::ShouldGetMultipleStates >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> ReadOnlyVDisk::TestWrites |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |86.0%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:15.735073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:15.735175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:15.735275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:15.735315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:15.735359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:15.735393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:15.735488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:15.735841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:15.811871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:15.811937Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:15.855397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:15.859568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:15.859837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:15.883784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:15.891324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:15.892145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:15.892531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:15.918602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:15.920122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:15.920190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:15.920435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:15.920486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:15.920526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:15.920650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:15.950725Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:16.156671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:16.156964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T23:16:16.157232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:16.157491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:16.157556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.160531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:16.160697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:16.160946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.161037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:16.161082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:16.161119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:16.163479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.163546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:16.163586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:16.171875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.171961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.172041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:16.172122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:16.176374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:16.179356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:16.179635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:16.180752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2024-11-21T23:16:16.180904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:16.180961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:16.181287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:16.181348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:16.181552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:16.181661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:16.184162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:16.184213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:16.184498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:16.184550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:16.184910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.184975Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:16.185077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:16.185109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:16.185161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:16.185218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:16.185256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:16:16.185287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:16.185363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:16.185405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:16.185442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:16.187729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:16.187867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:16.187918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:16.187962Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:16.188015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:16.188132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T23:16:48.900886Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2024-11-21T23:16:48.901266Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2024-11-21T23:16:48.901308Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2024-11-21T23:16:48.901378Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2024-11-21T23:16:48.901407Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2024-11-21T23:16:48.901444Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2024-11-21T23:16:48.901982Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T23:16:48.902161Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T23:16:48.902240Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2024-11-21T23:16:48.902318Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5 2024-11-21T23:16:48.902459Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4 2024-11-21T23:16:48.903020Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T23:16:48.903101Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2024-11-21T23:16:48.903132Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2024-11-21T23:16:48.903164Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5 2024-11-21T23:16:48.903203Z node 18 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6 2024-11-21T23:16:48.903281Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2024-11-21T23:16:48.910546Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2024-11-21T23:16:48.910754Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2024-11-21T23:16:48.925698Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1507 } } 2024-11-21T23:16:48.925787Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2024-11-21T23:16:48.925955Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1507 } } 2024-11-21T23:16:48.926133Z node 18 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 300 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1507 } } 2024-11-21T23:16:48.927811Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 751 RawX2: 77309413971 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T23:16:48.927927Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2024-11-21T23:16:48.928199Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 751 RawX2: 77309413971 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T23:16:48.928327Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 2024-11-21T23:16:48.928506Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 751 RawX2: 77309413971 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2024-11-21T23:16:48.928646Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 
72075186233409546 2024-11-21T23:16:48.928738Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T23:16:48.928808Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2024-11-21T23:16:48.928886Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2024-11-21T23:16:48.936331Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T23:16:48.936939Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T23:16:48.937439Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2024-11-21T23:16:48.937508Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2024-11-21T23:16:48.937790Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2024-11-21T23:16:48.937863Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T23:16:48.937959Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2024-11-21T23:16:48.938041Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2024-11-21T23:16:48.938135Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2024-11-21T23:16:48.938206Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2024-11-21T23:16:48.938334Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3 2024-11-21T23:16:48.938430Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2024-11-21T23:16:48.938457Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2024-11-21T23:16:48.938563Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5 2024-11-21T23:16:48.938617Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2024-11-21T23:16:48.938642Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2024-11-21T23:16:48.938678Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2024-11-21T23:16:51.599197Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T23:16:51.599721Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 572us result status StatusNameConflict 2024-11-21T23:16:51.600056Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: 
EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T23:16:54.051716Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2024-11-21T23:16:54.052230Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 563us result status StatusNameConflict 2024-11-21T23:16:54.052499Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [FAIL] Test command err: 2024-11-21T23:16:13.958034Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7439875376056548009:2048] with connection to localhost:61274:local 2024-11-21T23:16:13.958205Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:14.823676Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:14.823705Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:16.157036Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7439875387377541478:2048] with connection to localhost:61274:local 2024-11-21T23:16:16.157120Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:16.450633Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:16.450663Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:16.450956Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:16.720697Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2024-11-21T23:16:16.720731Z node 2 
:STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:16.723282Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:16.939739Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2024-11-21T23:16:16.939777Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:17.922678Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7439875397073799606:2048] with connection to localhost:61274:local 2024-11-21T23:16:17.922759Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:18.263571Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:18.263606Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:18.274497Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:19.981339Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:19.981375Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:19.985360Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:16:20.834993Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2024-11-21T23:16:20.835025Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:16:20.835642Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T23:16:22.150580Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2024-11-21T23:16:22.150617Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T23:16:22.151751Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:16:22.851289Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2024-11-21T23:16:22.851327Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:16:22.853365Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2024-11-21T23:16:23.686581Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2024-11-21T23:16:23.686624Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2024-11-21T23:16:23.690141Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:23.975090Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:25.309470Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7439875428003805292:2048] with connection to localhost:61274:local 2024-11-21T23:16:25.309566Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:25.625926Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:25.625959Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 
2024-11-21T23:16:25.626437Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:27.177410Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:27.177455Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:27.207010Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2024-11-21T23:16:27.426963Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2024-11-21T23:16:27.427496Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2024-11-21T23:16:28.811864Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7439875442892307855:2048] with connection to localhost:61274:local 2024-11-21T23:16:28.811890Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7439875442892307956:2128] 2024-11-21T23:16:28.811929Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:29.040250Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:29.040279Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:29.040822Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:30.480328Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:30.480360Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:30.480683Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:16:31.080540Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2024-11-21T23:16:31.080577Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:16:31.085246Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2024-11-21T23:16:31.646980Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2024-11-21T23:16:31.647030Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2024-11-21T23:16:31.647063Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2024-11-21T23:16:31.650160Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2024-11-21T23:16:31.662496Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T23:16:32.557119Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2024-11-21T23:16:32.587984Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2024-11-21T23:16:32.588019Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T23:16:32.590249Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:16:32.961376Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 
'PendingCommit' 2024-11-21T23:16:32.961411Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:16:32.961686Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2024-11-21T23:16:33.375645Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2024-11-21T23:16:33.375682Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2024-11-21T23:16:33.375714Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2024-11-21T23:16:33.375786Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2024-11-21T23:16:33.377563Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2024-11-21T23:16:33.905409Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2024-11-21T23:16:34.245294Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2024-11-21T23:16:34.245326Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2024-11-21T23:16:34.245621Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:16:34.823357Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2024-11-21T23:16:34.823400Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:16:34.823773Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2024-11-21T23:16:35.777663Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2024-11-21T23:16:35.777721Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2024-11-21T23:16:35.777758Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2024-11-21T23:16:35.777879Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2024-11-21T23:16:35.778311Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:36.118606Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:36.225164Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:36.331082Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:36.414144Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2024-11-21T23:16:36.437657Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:36.464727Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:36.566577Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_g ... 
.027612Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:43.039529Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:43.142895Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:43.158734Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:43.260728Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:43.274732Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:43.378865Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:43.398435Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:43.502482Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:43.518498Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:43.627383Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:43.659039Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:43.762940Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:43.822952Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:43.926655Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:43.949288Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:44.054994Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:44.078960Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:44.182290Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:44.241252Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:44.350498Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:44.387073Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:44.489645Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:44.514202Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:44.619152Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:44.645847Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:44.747137Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:44.765097Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:44.866410Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:44.899086Z node 5 
:STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:45.002464Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:45.018019Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:45.121759Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:45.142988Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:45.244264Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:45.267590Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:45.375268Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:45.400140Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:45.503001Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:45.537394Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:45.638663Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:45.653766Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:45.758496Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:45.790386Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:45.894627Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:45.928185Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:46.031263Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:46.073822Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:46.176599Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:46.199609Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:46.303967Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:46.336647Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:46.440030Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:46.473077Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:46.577484Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:46.603811Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:46.706953Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:46.734708Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:46.843045Z node 5 
:STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:46.867676Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:46.970526Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:46.999025Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:47.110509Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:47.130036Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:47.238537Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:47.268213Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:47.374979Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:47.418966Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:47.525141Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:47.539073Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:47.640962Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:47.659685Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:47.762795Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:47.777068Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:47.879116Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:47.910540Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:48.014912Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:48.025772Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:48.134492Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:48.167209Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:48.270787Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:48.304027Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:48.410551Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:48.427849Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:48.531009Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:48.557352Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:48.660773Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:48.685750Z node 5 
:STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:48.790493Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:48.806434Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:48.914587Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:48.934997Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:49.039146Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:49.059043Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:49.167060Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:49.187064Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:16:49.293715Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:49.324823Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse (yexception) gc not finished yet >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |86.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded >> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation >> ReadOnlyVDisk::TestDiscover [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint >> AutoConfig::GetASPoolsWith3CPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 16687006739688348797 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking 
SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T23:16:55.028620Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T23:16:55.333210Z 1 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T23:16:55.334353Z 2 00h02m00.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2024-11-21T23:16:55.617664Z 3 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5298:708] 2024-11-21T23:16:55.622887Z 1 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5284:694] 2024-11-21T23:16:55.623728Z 2 00h02m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5291:701] 2024-11-21T23:16:55.623917Z 1 00h02m30.160512s :BS_PROXY_PUT ERROR: [68fb615940ddc4cb] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: 
TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk 
[82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} >> Viewer::JsonAutocompleteColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [FAIL] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions |86.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |86.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndex [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] >> ReadOnlyVDisk::TestWrites [GOOD] >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::SelectStringWithNoBase64Encoding >> Viewer::JsonAutocompleteScheme [GOOD] >> Viewer::JsonAutocompleteSchemePOST >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 14089162982696495573 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2024-11-21T23:16:57.065778Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 
TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2024-11-21T23:16:57.072586Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2024-11-21T23:16:57.084854Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2024-11-21T23:16:57.088982Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2024-11-21T23:16:57.100775Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2024-11-21T23:16:57.104777Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2024-11-21T23:16:57.108870Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2024-11-21T23:16:57.114311Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# 
OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2024-11-21T23:16:59.602041Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T23:16:59.602200Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:16:59.602363Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T23:16:59.603367Z 1 00h03m30.111536s :BS_PROXY_PUT ERROR: [4f2908130d2148a2] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2024-11-21T23:16:59.605565Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T23:16:59.605751Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T23:16:59.607200Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2024-11-21T23:16:59.609467Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T23:16:59.610614Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T23:16:59.611782Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2024-11-21T23:16:59.613323Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:16:59.615009Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 
2024-11-21T23:16:59.615840Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:15:0:0:32768:0] 2024-11-21T23:16:59.617137Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:16:59.617238Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T23:16:59.618359Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2024-11-21T23:16:59.620698Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:16:59.620804Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T23:16:59.622061Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2024-11-21T23:16:59.623981Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T23:16:59.624258Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:16:59.624333Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2024-11-21T23:16:59.626998Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T23:16:59.627256Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T23:16:59.627390Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2024-11-21T23:16:59.630336Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T23:16:59.630645Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:16:59.630753Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED" 
ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2024-11-21T23:16:59.633451Z 1 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5283:694] 2024-11-21T23:16:59.633583Z 3 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:16:59.633709Z 2 00h03m30.111536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] ... 2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk 
read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2024-11-21T23:17:03.166107Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:17:03.166280Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2024-11-21T23:17:03.169993Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T23:17:03.173584Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2024-11-21T23:17:03.178010Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2024-11-21T23:17:03.181058Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:17:03.181178Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2024-11-21T23:17:03.184015Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:17:03.184121Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2024-11-21T23:17:03.187194Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:17:03.187286Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2024-11-21T23:17:03.190007Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] 2024-11-21T23:17:03.190284Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2024-11-21T23:17:03.194175Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:17:03.194289Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only 
Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2024-11-21T23:17:03.196979Z 3 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5297:708] 2024-11-21T23:17:03.197114Z 2 00h08m00.211536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5290:701] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 
RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 14932, MsgBus: 25012 2024-11-21T23:16:49.497385Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875534050549573:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:49.497429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001405/r3tmp/tmpoliDUk/pdisk_1.dat 2024-11-21T23:16:50.214377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:50.214484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:50.220394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:50.344947Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14932, node 1 2024-11-21T23:16:50.623005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:50.623043Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:50.623050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:50.623154Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:25012 TClient is connected to server localhost:25012 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:51.805149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:51.830624Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:16:51.853716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:52.052208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:52.333891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:52.430558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:54.498545Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875534050549573:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:54.498629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:54.856207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875555525387620:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:54.856337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:55.285063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:55.378635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:55.427954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:55.525735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:55.628710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:55.769134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:55.825707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875559820355428:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:55.825786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:55.825997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875559820355433:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:55.829904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:55.846900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875559820355435:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:16:56.979903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:01.019911Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g7fbf74en14x32yq77jv1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTA1ZmRiZGMtNzJjOGM3NGMtZjJjNTk4ZTUtNjg5ZTdiNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:17:01.047435Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTA1ZmRiZGMtNzJjOGM3NGMtZjJjNTk4ZTUtNjg5ZTdiNDg=, ActorId: [1:7439875568410291094:2524], ActorState: ExecuteState, TraceId: 01jd8g7fbf74en14x32yq77jv1, Create QueryResponse for error on request, msg: >> TColumnShardTestSchema::HotTiers >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |86.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> TColumnShardTestSchema::RebootHotTiersWithStat >> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |86.1%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |86.1%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] >> TColumnShardTestSchema::ColdTiersWithStat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [FAIL] Test command err: 2024-11-21T23:16:31.498041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:639:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:31.498577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:31.498900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:16:31.499490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:637:2326], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:31.499568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:31.499607Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:16:31.899290Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:32.059465Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T23:16:32.078133Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T23:16:32.562894Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 4980, node 1 TClient is connected to server localhost:10942 2024-11-21T23:16:32.845000Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:32.845050Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:32.845074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:32.845371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:16:36.240573Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875478293177416:2066];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:36.240664Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:16:36.445432Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:36.450252Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:36.450380Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:36.454312Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19493, node 3 2024-11-21T23:16:36.542044Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:36.542075Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:36.542089Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:36.542306Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18478 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:36.964436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:36.987728Z node 3 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T23:16:39.957422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875491178079958:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:39.957637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:39.957908Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875491178079970:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:39.962553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:16:39.974608Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875491178079972:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:16:40.322350Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzYxMjdhNDEtNjlmMjBmMWYtN2ZlZjhmYWItN2NiMDhmZGM=, ActorId: [3:7439875491178079932:2336], ActorState: ExecuteState, TraceId: 01jd8g6vwj9gca7ph08c7nnk7e, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction 2024-11-21T23:16:42.890997Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7439875506767885571:2064];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:42.891093Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:16:43.082899Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:43.120124Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:43.120246Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:43.125316Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14910, node 4 2024-11-21T23:16:43.285859Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:43.285886Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:43.285898Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:43.286077Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14619 2024-11-21T23:16:43.754386Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999890s 2024-11-21T23:16:43.754583Z node 4 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T23:16:47.895024Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439875506767885571:2064];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:47.895100Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:48.243408Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875532537689981:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:48.243524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:48.305659Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875532537689994:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:48.305758Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:48.305818Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875532537689999:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:48.313905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:16:48.324411Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:16:48.324503Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:16:48.324513Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:16:48.324540Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:16:48.337032Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:16:48.337118Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:16:48.337126Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2024-11-21T23:16:48.337155Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no SecurityState 2024-11-21T23:16:48.339534Z node 4 :KQP_WORKLOAD_SERVICE WARN: [W ... ok> peer# 2024-11-21T23:16:57.573389Z node 5 :GRPC_SERVER DEBUG: [0x51b00047b280] received request Name# Coordination/CreateNode ok# false data# peer# 2024-11-21T23:16:57.573645Z node 5 :GRPC_SERVER DEBUG: [0x51b00047ab80] received request Name# Coordination/AlterNode ok# false data# peer# 2024-11-21T23:16:57.573896Z node 5 :GRPC_SERVER DEBUG: [0x51b00047a480] received request Name# Coordination/DropNode ok# false data# peer# 2024-11-21T23:16:57.574175Z node 5 :GRPC_SERVER DEBUG: [0x51b000479d80] received request Name# Coordination/DescribeNode ok# false data# peer# 2024-11-21T23:16:57.578855Z node 5 :GRPC_SERVER DEBUG: [0x51b000479680] received request Name# CreateDatabase ok# false data# peer# 2024-11-21T23:16:57.579194Z node 5 :GRPC_SERVER DEBUG: [0x51b000478880] received request Name# GetDatabaseStatus ok# false data# peer# 2024-11-21T23:16:57.579484Z node 5 :GRPC_SERVER DEBUG: [0x51b000478f80] received request Name# AlterDatabase ok# false data# peer# 2024-11-21T23:16:57.579743Z node 5 :GRPC_SERVER DEBUG: [0x51b000478180] received request Name# ListDatabases ok# false data# peer# 2024-11-21T23:16:57.580002Z node 5 :GRPC_SERVER DEBUG: [0x51b000477a80] received request Name# RemoveDatabase ok# false data# peer# 2024-11-21T23:16:57.580286Z node 5 :GRPC_SERVER DEBUG: [0x51b000477380] received request Name# DescribeDatabaseOptions ok# false data# peer# 2024-11-21T23:16:57.580564Z node 5 :GRPC_SERVER DEBUG: [0x51b000476c80] received request Name# GetScaleRecommendation ok# false data# peer# 2024-11-21T23:16:57.580821Z node 5 :GRPC_SERVER DEBUG: [0x51b000475780] received request Name# ListEndpoints ok# false data# peer# 2024-11-21T23:16:57.581084Z node 5 :GRPC_SERVER DEBUG: [0x51b000476580] received request Name# WhoAmI ok# false data# peer# 2024-11-21T23:16:57.581334Z node 5 :GRPC_SERVER DEBUG: [0x51b000475e80] received request Name# NodeRegistration ok# false data# peer# 2024-11-21T23:16:57.581595Z node 5 :GRPC_SERVER DEBUG: [0x51b000475080] received request Name# Scan ok# false data# peer# 2024-11-21T23:16:57.581879Z node 5 :GRPC_SERVER DEBUG: [0x51b000474980] received request Name# GetShardLocations ok# false data# peer# 2024-11-21T23:16:57.582140Z node 5 :GRPC_SERVER DEBUG: [0x51b000474280] received request Name# DescribeTable ok# false data# peer# 2024-11-21T23:16:57.594606Z node 5 :GRPC_SERVER DEBUG: [0x51b000473480] received request Name# RefreshSnapshot ok# false 
data# peer# 2024-11-21T23:16:57.594997Z node 5 :GRPC_SERVER DEBUG: [0x51b000472d80] received request Name# DiscardSnapshot ok# false data# peer# 2024-11-21T23:16:57.595279Z node 5 :GRPC_SERVER DEBUG: [0x51b000472680] received request Name# List ok# false data# peer# 2024-11-21T23:16:57.595557Z node 5 :GRPC_SERVER DEBUG: [0x51b000471f80] received request Name# RateLimiter/CreateResource ok# false data# peer# 2024-11-21T23:16:57.595819Z node 5 :GRPC_SERVER DEBUG: [0x51b000471880] received request Name# RateLimiter/AlterResource ok# false data# peer# 2024-11-21T23:16:57.596087Z node 5 :GRPC_SERVER DEBUG: [0x51b000471180] received request Name# RateLimiter/DropResource ok# false data# peer# 2024-11-21T23:16:57.596349Z node 5 :GRPC_SERVER DEBUG: [0x51b000470a80] received request Name# RateLimiter/ListResources ok# false data# peer# 2024-11-21T23:16:57.596609Z node 5 :GRPC_SERVER DEBUG: [0x51b000470380] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2024-11-21T23:16:57.596869Z node 5 :GRPC_SERVER DEBUG: [0x51b00046fc80] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2024-11-21T23:16:57.597145Z node 5 :GRPC_SERVER DEBUG: [0x51b00046ee80] received request Name# CreateStream ok# false data# peer# 2024-11-21T23:16:57.597413Z node 5 :GRPC_SERVER DEBUG: [0x51b00046e780] received request Name# ListStreams ok# false data# peer# 2024-11-21T23:16:57.597673Z node 5 :GRPC_SERVER DEBUG: [0x51b00046e080] received request Name# DeleteStream ok# false data# peer# 2024-11-21T23:16:57.597946Z node 5 :GRPC_SERVER DEBUG: [0x51b00046f580] received request Name# DescribeStream ok# false data# peer# 2024-11-21T23:16:57.598199Z node 5 :GRPC_SERVER DEBUG: [0x51b00046d980] received request Name# ListShards ok# false data# peer# 2024-11-21T23:16:57.610603Z node 5 :GRPC_SERVER DEBUG: [0x51b00046a180] received request Name# SetWriteQuota ok# false data# peer# 2024-11-21T23:16:57.610989Z node 5 :GRPC_SERVER DEBUG: [0x51b000522480] received request Name# UpdateStream ok# false data# peer# 2024-11-21T23:16:57.611286Z node 5 :GRPC_SERVER DEBUG: [0x51b00046d280] received request Name# PutRecord ok# false data# peer# 2024-11-21T23:16:57.611585Z node 5 :GRPC_SERVER DEBUG: [0x51b00046cb80] received request Name# PutRecords ok# false data# peer# 2024-11-21T23:16:57.611821Z node 5 :GRPC_SERVER DEBUG: [0x51b00046c480] received request Name# GetRecords ok# false data# peer# 2024-11-21T23:16:57.612096Z node 5 :GRPC_SERVER DEBUG: [0x51b00046bd80] received request Name# GetShardIterator ok# false data# peer# 2024-11-21T23:16:57.612341Z node 5 :GRPC_SERVER DEBUG: [0x51b00046b680] received request Name# SubscribeToShard ok# false data# peer# 2024-11-21T23:16:57.612676Z node 5 :GRPC_SERVER DEBUG: [0x51b00046a880] received request Name# DescribeLimits ok# false data# peer# 2024-11-21T23:16:57.612939Z node 5 :GRPC_SERVER DEBUG: [0x51b0004ab480] received request Name# DescribeStreamSummary ok# false data# peer# 2024-11-21T23:16:57.613205Z node 5 :GRPC_SERVER DEBUG: [0x51b0004aad80] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2024-11-21T23:16:57.613469Z node 5 :GRPC_SERVER DEBUG: [0x51b0004b0180] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2024-11-21T23:16:57.613728Z node 5 :GRPC_SERVER DEBUG: [0x51b0004aa680] received request Name# UpdateShardCount ok# false data# peer# 2024-11-21T23:16:57.613960Z node 5 :GRPC_SERVER DEBUG: [0x51b0004a9f80] received request Name# UpdateStreamMode ok# false data# peer# 2024-11-21T23:16:57.614202Z 
node 5 :GRPC_SERVER DEBUG: [0x51b000491780] received request Name# RegisterStreamConsumer ok# false data# peer# 2024-11-21T23:16:57.614489Z node 5 :GRPC_SERVER DEBUG: [0x51b000473b80] received request Name# CreateSnapshot ok# false data# peer# 2024-11-21T23:16:57.614794Z node 5 :GRPC_SERVER DEBUG: [0x51b000490980] received request Name# DeregisterStreamConsumer ok# false data# peer# 2024-11-21T23:16:57.615103Z node 5 :GRPC_SERVER DEBUG: [0x51b000490280] received request Name# DescribeStreamConsumer ok# false data# peer# 2024-11-21T23:16:57.615376Z node 5 :GRPC_SERVER DEBUG: [0x51b00048f480] received request Name# ListStreamConsumers ok# false data# peer# 2024-11-21T23:16:57.615671Z node 5 :GRPC_SERVER DEBUG: [0x51b00048ed80] received request Name# AddTagsToStream ok# false data# peer# 2024-11-21T23:16:57.615934Z node 5 :GRPC_SERVER DEBUG: [0x51b00048df80] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2024-11-21T23:16:57.616174Z node 5 :GRPC_SERVER DEBUG: [0x51b00048d880] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2024-11-21T23:16:57.616423Z node 5 :GRPC_SERVER DEBUG: [0x51b00048ca80] received request Name# ListTagsForStream ok# false data# peer# 2024-11-21T23:16:57.616685Z node 5 :GRPC_SERVER DEBUG: [0x51b00048c380] received request Name# MergeShards ok# false data# peer# 2024-11-21T23:16:57.616971Z node 5 :GRPC_SERVER DEBUG: [0x51b00048b580] received request Name# RemoveTagsFromStream ok# false data# peer# 2024-11-21T23:16:57.617207Z node 5 :GRPC_SERVER DEBUG: [0x51b00048ae80] received request Name# SplitShard ok# false data# peer# 2024-11-21T23:16:57.626662Z node 5 :GRPC_SERVER DEBUG: [0x51b000489980] received request Name# StopStreamEncryption ok# false data# peer# 2024-11-21T23:16:57.626662Z node 5 :GRPC_SERVER DEBUG: [0x51b00048a080] received request Name# StartStreamEncryption ok# false data# peer# 2024-11-21T23:16:57.627010Z node 5 :GRPC_SERVER DEBUG: [0x51b000469380] received request Name# NodeCheck ok# false data# peer# 2024-11-21T23:16:57.627035Z node 5 :GRPC_SERVER DEBUG: [0x51b000469a80] received request Name# SelfCheck ok# false data# peer# 2024-11-21T23:16:57.627273Z node 5 :GRPC_SERVER DEBUG: [0x51b000467080] received request Name# CreateSession ok# false data# peer# 2024-11-21T23:16:57.627289Z node 5 :GRPC_SERVER DEBUG: [0x51b000466980] received request Name# DeleteSession ok# false data# peer# 2024-11-21T23:16:57.627536Z node 5 :GRPC_SERVER DEBUG: [0x51b000539080] received request Name# AttachSession ok# false data# peer# 2024-11-21T23:16:57.627580Z node 5 :GRPC_SERVER DEBUG: [0x51b00053a580] received request Name# BeginTransaction ok# false data# peer# 2024-11-21T23:16:57.627809Z node 5 :GRPC_SERVER DEBUG: [0x51b00053ba80] received request Name# CommitTransaction ok# false data# peer# 2024-11-21T23:16:57.627844Z node 5 :GRPC_SERVER DEBUG: [0x51b00053c880] received request Name# RollbackTransaction ok# false data# peer# 2024-11-21T23:16:57.628039Z node 5 :GRPC_SERVER DEBUG: [0x51b000468c80] received request Name# ExecuteQuery ok# false data# peer# 2024-11-21T23:16:57.628089Z node 5 :GRPC_SERVER DEBUG: [0x51b000467e80] received request Name# ExecuteScript ok# false data# peer# 2024-11-21T23:16:57.628278Z node 5 :GRPC_SERVER DEBUG: [0x51b000467780] received request Name# FetchScriptResults ok# false data# peer# 2024-11-21T23:16:57.628329Z node 5 :GRPC_SERVER DEBUG: [0x51b00053cf80] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2024-11-21T23:16:57.628534Z node 5 :GRPC_SERVER DEBUG: [0x51b00053dd80] 
received request Name# ChangeTabletSchema ok# false data# peer# 2024-11-21T23:16:57.628558Z node 5 :GRPC_SERVER DEBUG: [0x51b000541c80] received request Name# RestartTablet ok# false data# peer# 2024-11-21T23:16:57.628778Z node 5 :GRPC_SERVER DEBUG: [0x51b00053e480] received request Name# CreateLogStore ok# false data# peer# 2024-11-21T23:16:57.628811Z node 5 :GRPC_SERVER DEBUG: [0x51b000540780] received request Name# DescribeLogStore ok# false data# peer# 2024-11-21T23:16:57.629019Z node 5 :GRPC_SERVER DEBUG: [0x51b000540e80] received request Name# DropLogStore ok# false data# peer# 2024-11-21T23:16:57.629044Z node 5 :GRPC_SERVER DEBUG: [0x51b000543180] received request Name# AlterLogStore ok# false data# peer# 2024-11-21T23:16:57.629256Z node 5 :GRPC_SERVER DEBUG: [0x51b000543880] received request Name# CreateLogTable ok# false data# peer# 2024-11-21T23:16:57.629285Z node 5 :GRPC_SERVER DEBUG: [0x51b000548580] received request Name# DescribeLogTable ok# false data# peer# 2024-11-21T23:16:57.629490Z node 5 :GRPC_SERVER DEBUG: [0x51b000509580] received request Name# DropLogTable ok# false data# peer# 2024-11-21T23:16:57.629506Z node 5 :GRPC_SERVER DEBUG: [0x51b000544680] received request Name# AlterLogTable ok# false data# peer# 2024-11-21T23:16:57.629718Z node 5 :GRPC_SERVER DEBUG: [0x51b000547780] received request Name# Login ok# false data# peer# 2024-11-21T23:16:57.629734Z node 5 :GRPC_SERVER DEBUG: [0x51b000548c80] received request Name# DescribeReplication ok# false data# peer# (TSystemError) (Error 11: Resource temporarily unavailable) util/network/socket.cpp:910: can not read from socket input stream >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD] Test command err: Trying to start YDB, gRPC: 27573, MsgBus: 26657 2024-11-21T23:16:44.475582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875514887205768:2168];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:44.475653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001411/r3tmp/tmpmpKFr9/pdisk_1.dat 2024-11-21T23:16:44.956304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:44.956384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:44.962758Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:44.966057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27573, node 1 2024-11-21T23:16:45.155068Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:45.155104Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:45.155110Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:45.155207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26657 TClient is connected to server localhost:26657 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:45.871836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:45.887108Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:45.914547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:16:46.122896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:46.296008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:46.378814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:48.777172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875532067076549:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:48.777378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:48.817823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:48.852131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:48.900161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:48.977100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.018937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.120468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.186321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875536362044350:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:49.186419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:49.186636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875536362044355:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:49.191370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:49.206705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875536362044357:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:16:49.460575Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875514887205768:2168];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:49.460649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:50.573559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:16:50.780871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2024-11-21T23:16:50.908908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2024-11-21T23:16:51.669268Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 7699, MsgBus: 11406 2024-11-21T23:16:54.270781Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875557694530810:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:54.270841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001411/r3tmp/tmpzllWes/pdisk_1.dat 2024-11-21T23:16:54.492659Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:54.556940Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:54.557043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7699, node 2 2024-11-21T23:16:54.568818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:54.839038Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:54.839065Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:54.839073Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:54.839180Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11406 TClient is connected to server localhost:11406 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:55.678737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:55.690683Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:16:55.696284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:55.794341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:56.123125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:56.236839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:59.274311Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875557694530810:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:59.274414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:59.348695Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875579169368779:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:59.348775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:59.428164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:59.526780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:59.616447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:59.739874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:59.896949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:00.037220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:00.143758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875583464336585:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:00.143860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:00.144091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875583464336590:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:00.148289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:00.170681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875583464336592:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:17:02.065262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:03.302191Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:17:03.355414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2024-11-21T23:17:03.471045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.135495Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:17:05.867680Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:17:05.933676Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite >> TPersQueueMirrorer::TestBasicRemote [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> CdcStreamChangeCollector::UpsertManyRows >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] >> AsyncIndexChangeCollector::DeleteNothing >> TSubDomainTest::FailIfAffectedSetNotInterior >> TStorageServiceTest::ShouldAbortCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 61773, MsgBus: 26304 2024-11-21T23:16:47.240245Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875525111123637:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:47.240293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001409/r3tmp/tmp5932qy/pdisk_1.dat 2024-11-21T23:16:47.932266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:47.932584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:47.933109Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:47.943043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61773, node 1 2024-11-21T23:16:48.154547Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:48.154567Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2024-11-21T23:16:48.154578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:48.154666Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26304 TClient is connected to server localhost:26304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:48.868075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:48.886139Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:16:48.897656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:16:49.047414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.277857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:49.370100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:52.323136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875546585961839:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.323508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.336663Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875525111123637:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:52.336782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:52.368557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.411268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.451967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.502842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.558450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.624662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.734738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875546585962340:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.734832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.735162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875546585962345:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.739735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:52.777048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875546585962347:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:16:54.154417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:16:54.275089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14344, MsgBus: 25952 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001409/r3tmp/tmpO3Bnho/pdisk_1.dat 2024-11-21T23:16:58.435645Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875572150334449:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:58.625275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:16:58.786178Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:58.802914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:58.803033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:58.808167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14344, node 2 2024-11-21T23:16:59.172036Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:59.172069Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:59.172078Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:59.172212Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25952 TClient is connected to server localhost:25952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:00.520311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:00.529937Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:00.543619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:00.655908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:00.882653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:01.014944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:03.177978Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875572150334449:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:03.178063Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:04.175428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875597920139756:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:04.175528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:04.236200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.289457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.372187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.445246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.560172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.622204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.734965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875597920140262:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:04.735078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:04.735581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875597920140267:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:04.740741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:04.763739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875597920140269:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:06.488372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:06.582473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower >> TSchemeShardServerLess::StorageBilling [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command err: 2024-11-21T23:16:47.968370Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7439875522933708490:2048] with connection to localhost:2905:local 2024-11-21T23:16:47.968493Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:48.806489Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:48.806515Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:48.809350Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:49.232650Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130 2024-11-21T23:16:49.232684Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:50.976515Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7439875533911349754:2048] with connection to localhost:2905:local 2024-11-21T23:16:50.976612Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:52.273774Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130 2024-11-21T23:16:52.273805Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:54.106513Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7439875545858345825:2048] with connection to localhost:2905:local 2024-11-21T23:16:54.106613Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:54.435760Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:54.435797Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:54.437115Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:56.656514Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:56.656560Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:56.658222Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:58.923963Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Conflict with existing key., code: 2012 2024-11-21T23:16:58.923994Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:17:01.146466Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7439875582467667705:2048] with connection to localhost:2905:local 2024-11-21T23:17:01.146626Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:17:01.402655Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:17:01.402688Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:17:01.403116Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:17:02.183708Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2024-11-21T23:17:02.183754Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:17:04.408989Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7439875595347814803:2048] with connection to localhost:2905:local 2024-11-21T23:17:04.409108Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:17:04.906614Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:17:04.906647Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:17:04.908698Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:17:07.197297Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:17:07.197335Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:17:07.206551Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2024-11-21T23:17:07.534740Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2024-11-21T23:17:07.534773Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2024-11-21T23:17:07.535139Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:17:07.860844Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2024-11-21T23:17:07.860887Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse >> Viewer::JsonAutocompleteColumnsPOST [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::TestBasicRemote [GOOD] Test command err: 2024-11-21T23:15:28.601966Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875187470870867:2120];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.603106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000eee/r3tmp/tmpAVIabD/pdisk_1.dat 2024-11-21T23:15:28.798327Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:28.946653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3347, node 1 2024-11-21T23:15:29.005422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:29.010722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.030568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/000eee/r3tmp/yandexW45G7q.tmp 2024-11-21T23:15:29.030581Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/000eee/r3tmp/yandexW45G7q.tmp 2024-11-21T23:15:29.030760Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/000eee/r3tmp/yandexW45G7q.tmp 2024-11-21T23:15:29.030854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.031719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:29.092360Z INFO: TTestServer started on Port 17756 GrpcPort 3347 TClient is connected to server localhost:17756 PQClient connected to localhost:3347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:15:29.380071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:29.399650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:29.419012Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:15:29.425821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:29.614639Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:29.625276Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:15:31.591307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875200355773450:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:31.591445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:31.593370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875200355773470:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:31.597754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:15:31.708306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875200355773472:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:32.003907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.036241Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875200355773547:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:32.037753Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGNjNTEyYTgtOTUxM2Y3ZjktYmI4MGVjMDktYjBkMjBhMDg=, ActorId: [1:7439875200355773446:2305], ActorState: ExecuteState, TraceId: 01jd8g4s3s6gkjvxh2axw7x127, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:32.040199Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:32.071674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.238878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439875204650741126:2625] 2024-11-21T23:15:33.600737Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875187470870867:2120];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.600804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T23:15:39.356598Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T23:15:39.397053Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:15:39.399085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439875234715512506:2800], Recipient [1:7439875187470871199:2194]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:39.399133Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:39.399170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T23:15:39.399218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439875234715512502:2797], Recipient [1:7439875187470871199:2194]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2024-11-21T23:15:39.399240Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:15:39.462782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:15:39.463365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:15:39.463712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T23:15:39.463757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T23:15:39.463791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T23:15:39.463823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T23:15:39.463839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 
3 2024-11-21T23:15:39.463854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount r ... v1 sending to client partition status 2024-11-21T23:17:06.687109Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/some_user session shared/some_user_5_1_14086099591004378101_v1 grpc read done: success# 0, data# { } 2024-11-21T23:17:06.687162Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/some_user session shared/some_user_5_1_14086099591004378101_v1 grpc read failed 2024-11-21T23:17:06.687187Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/some_user session shared/some_user_5_1_14086099591004378101_v1 grpc closed 2024-11-21T23:17:06.687228Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/some_user session shared/some_user_5_1_14086099591004378101_v1 is DEAD 2024-11-21T23:17:06.687744Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/some_user session shared/some_user_5_2_5051355683096620703_v1 grpc read done: success# 0, data# { } 2024-11-21T23:17:06.687761Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_5051355683096620703_v1 grpc read failed 2024-11-21T23:17:06.687776Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_5051355683096620703_v1 grpc closed 2024-11-21T23:17:06.687806Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/some_user session shared/some_user_5_2_5051355683096620703_v1 is DEAD 2024-11-21T23:17:06.692357Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic2] pipe [5:7439875590183531516:2503] disconnected; active server actors: 1 2024-11-21T23:17:06.692413Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic2] pipe [5:7439875590183531516:2503] client some_user disconnected session shared/some_user_5_1_14086099591004378101_v1 2024-11-21T23:17:06.692498Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic2] consumer some_user rebalancing was scheduled 2024-11-21T23:17:06.692541Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic2] pipe [5:7439875590183531521:2504] disconnected; active server actors: 1 2024-11-21T23:17:06.692557Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic2] pipe [5:7439875590183531521:2504] client some_user disconnected session shared/some_user_5_2_5051355683096620703_v1 2024-11-21T23:17:06.726884Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:17:06.726951Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/some_user_5_1_14086099591004378101_v1 2024-11-21T23:17:06.726992Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439875590183531531:2515] destroyed 2024-11-21T23:17:06.727019Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:17:06.727036Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/some_user_5_2_5051355683096620703_v1 2024-11-21T23:17:06.727058Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439875590183531532:2516] destroyed 2024-11-21T23:17:06.727100Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/some_user_5_1_14086099591004378101_v1 2024-11-21T23:17:06.727122Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: 
shared/some_user_5_2_5051355683096620703_v1 2024-11-21T23:17:06.791078Z node 5 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] [] Got error. Status: CLIENT_CANCELLED. Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T23:17:06.791409Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] [] In Reconnect, ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2024-11-21T23:17:06.791473Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] [] New values: ReadSizeBudget = 8388608, ReadSizeServerDelta = 0 2024-11-21T23:17:06.791609Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] [] Closing session to cluster: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T23:17:06.792449Z node 5 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] [] Got error. Status: CLIENT_CANCELLED. Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T23:17:06.792628Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] [] In Reconnect, ReadSizeBudget = 0, ReadSizeServerDelta = 8388608 2024-11-21T23:17:06.792649Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] [] New values: ReadSizeBudget = 8388608, ReadSizeServerDelta = 0 2024-11-21T23:17:06.792700Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] [] Closing session to cluster: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T23:17:06.854170Z node 6 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0] got next reader event: 1 2024-11-21T23:17:06.854020Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:17:06.854117Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] [] Abort session to cluster 2024-11-21T23:17:06.854356Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] Closing read session. Close timeout: 0.000000s 2024-11-21T23:17:06.854451Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:0:1:16:17 2024-11-21T23:17:06.854506Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4852 BytesRead: 534 MessagesRead: 17 BytesReadCompressed: 534 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:17:06.854564Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] Closing read session. Close timeout: 0.000000s 2024-11-21T23:17:06.854617Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:0:1:16:17 2024-11-21T23:17:06.854664Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4852 BytesRead: 534 MessagesRead: 17 BytesReadCompressed: 534 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:17:06.854772Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0][reader 1] [] [48b9a245-4f162a21-bd88ab4d-4e431a06] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:17:06.867485Z node 6 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0] schedule consumer creation 2024-11-21T23:17:06.867810Z node 6 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1] got next reader event: 1 2024-11-21T23:17:06.867709Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:17:06.867778Z node 5 :PQ_MIRRORER DEBUG: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] [] Abort session to cluster 2024-11-21T23:17:06.867898Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] Closing read session. Close timeout: 0.000000s 2024-11-21T23:17:06.867942Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:1:1:10:11 2024-11-21T23:17:06.867975Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4858 BytesRead: 251 MessagesRead: 11 BytesReadCompressed: 251 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:17:06.868000Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] Closing read session. Close timeout: 0.000000s 2024-11-21T23:17:06.868023Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic2:1:1:10:11 2024-11-21T23:17:06.868048Z node 5 :PQ_MIRRORER INFO: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4858 BytesRead: 251 MessagesRead: 11 BytesReadCompressed: 251 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:17:06.868097Z node 5 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1][reader 1] [] [674f70d5-f087e168-7397697f-378d1c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:17:06.872883Z node 6 :PQ_MIRRORER NOTICE: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1] schedule consumer creation 2024-11-21T23:17:06.873010Z node 6 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0]: read session closed: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } 2024-11-21T23:17:06.873120Z node 6 :PQ_MIRRORER ERROR: [mirrorer for Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1]: read session closed: SessionClosed { Status: CLIENT_CANCELLED Issues: "
: Error: GRpc error: (1): Cancelled on the server side " } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:30.596010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:30.596101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:30.596138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:30.596194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:30.596239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:30.596269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:30.596344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:30.596690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:30.655713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:30.655776Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:30.674539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:30.678580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:30.678807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:30.692330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:30.692929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:30.693489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:30.693764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:30.699081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:30.700060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:30.700113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:30.700270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:30.700303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T23:16:30.700328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:30.700383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.709754Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:30.845208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:30.845460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.845691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:30.845905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:30.845975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.848810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:30.848986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:30.849191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.849248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:30.849295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:30.849334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:30.851411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.851475Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:30.851507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:30.853216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.853265Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.853306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:30.853347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:30.857597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:30.863146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:30.863414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:30.864474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:30.864657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:30.864716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:30.864991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:30.865056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:30.865266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:30.865415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:30.867915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:30.867962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:30.868173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:30.868221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:30.868645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:30.868694Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:30.868795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:30.868829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:30.868881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:30.868929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:30.869000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T23:16:30.869033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:30.869102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:30.869147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:30.869184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:30.871038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:30.871148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:30.871188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:30.871238Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:30.871300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:30.871405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... EMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet72075186233409549 2024-11-21T23:17:11.950214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2024-11-21T23:17:11.950251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2024-11-21T23:17:11.950335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet72075186233409549 2024-11-21T23:17:11.950476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2024-11-21T23:17:11.950604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2024-11-21T23:17:11.950660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2024-11-21T23:17:11.955565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T23:17:11.955823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T23:17:11.958604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2024-11-21T23:17:11.958683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2024-11-21T23:17:11.958858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 
72075186233409549, LocalPathId: 2] 2024-11-21T23:17:11.959022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2024-11-21T23:17:11.959072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:659:2574], at schemeshard: 72075186233409549, txId: 107, path id: 1 2024-11-21T23:17:11.959110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:659:2574], at schemeshard: 72075186233409549, txId: 107, path id: 2 2024-11-21T23:17:11.959690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T23:17:11.959751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2024-11-21T23:17:11.959853Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2024-11-21T23:17:11.959890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2024-11-21T23:17:11.959951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2024-11-21T23:17:11.960797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2024-11-21T23:17:11.960891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2024-11-21T23:17:11.960929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2024-11-21T23:17:11.960966Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2024-11-21T23:17:11.961014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2024-11-21T23:17:11.962273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2024-11-21T23:17:11.962367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2024-11-21T23:17:11.962583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2024-11-21T23:17:11.962620Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2024-11-21T23:17:11.962648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2024-11-21T23:17:11.962719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2024-11-21T23:17:11.965679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 
2024-11-21T23:17:11.965745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2024-11-21T23:17:11.966101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2024-11-21T23:17:11.966273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2024-11-21T23:17:11.966307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T23:17:11.966351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2024-11-21T23:17:11.966468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:810:2689] message: TxId: 107 2024-11-21T23:17:11.966513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2024-11-21T23:17:11.966547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2024-11-21T23:17:11.966595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2024-11-21T23:17:11.966744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2024-11-21T23:17:11.967820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2024-11-21T23:17:11.969612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2024-11-21T23:17:11.970890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2024-11-21T23:17:11.970941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2178:4027] TestWaitNotification: OK eventTxId 107 2024-11-21T23:17:11.989849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 776 RawX2: 4294969964 } TabletId: 72075186233409552 State: 4 2024-11-21T23:17:11.989954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2024-11-21T23:17:11.992153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2024-11-21T23:17:11.992757Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2024-11-21T23:17:11.996032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2024-11-21T23:17:11.996374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2024-11-21T23:17:11.997261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2024-11-21T23:17:11.997311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2024-11-21T23:17:11.997380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2024-11-21T23:17:12.000500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409549:4 2024-11-21T23:17:12.000594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2024-11-21T23:17:12.000974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2024-11-21T23:17:12.139315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2024-11-21T23:17:12.139527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2024-11-21T23:17:12.139640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2024-11-21T23:17:12.139728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2024-11-21T23:17:12.139759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2024-11-21T23:17:12.139787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2024-11-21T23:17:12.139823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2024-11-21T23:17:12.139849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2024-11-21T23:17:12.139878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2024-11-21T23:17:12.219489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:17:12.219872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.028000Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z 2024-11-21T23:17:12.223523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete grabMeteringMessage has happened 2024-11-21T23:17:12.223757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk100 >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot |86.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2024-11-21T23:15:32.037206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:32.043421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:32.043637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c3d/r3tmp/tmpOcqNUs/pdisk_1.dat 2024-11-21T23:15:32.476384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.528231Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:32.585533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:32.585672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:32.597725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:32.725311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.782982Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:32.784054Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:32.784515Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:32.784760Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:32.853272Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:32.854024Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:32.854126Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:32.855878Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:32.855963Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:32.856016Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:32.856361Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:32.942816Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:32.943018Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:32.943119Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:32.943167Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:32.943210Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:32.943247Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:32.943698Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:32.943762Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:32.944229Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:32.944322Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:32.944458Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:32.944501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:32.944551Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:32.944609Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:32.944671Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:32.944725Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:32.944758Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:32.944792Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:32.944822Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:32.944867Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:32.944995Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:32.945051Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:32.945154Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:32.945368Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:32.945427Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:32.945556Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:32.945620Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:32.945660Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:32.945696Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:32.945744Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:32.945997Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:32.946035Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:32.946067Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:32.946097Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:32.946149Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:32.946190Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:32.946226Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:32.946275Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:32.946299Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:32.947950Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:32.948005Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:32.958818Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:32.958898Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:32.958986Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:32.959055Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:32.959118Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:33.163665Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.163725Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.163760Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:33.163878Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:33.163920Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:33.164026Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:33.164074Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:33.164121Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:33.164159Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:33.178322Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:33.181580Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:33.182165Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.182210Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.182293Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:33.182342Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:33.182381Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:33.182454Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:17:11.351222Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:17:11.351286Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:17:11.351525Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:842:2676], Recipient [13:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:11.351564Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:11.351617Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:17:11.351654Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:11.351690Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:11.351726Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2024-11-21T23:17:11.351780Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2024-11-21T23:17:11.351821Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T23:17:11.351889Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2024-11-21T23:17:11.351923Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T23:17:11.351959Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2024-11-21T23:17:11.352095Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2024-11-21T23:17:11.352135Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T23:17:11.352162Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T23:17:11.352183Z node 13 :TX_DATASHARD TRACE: Add 
[3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:11.352220Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T23:17:11.352256Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2024-11-21T23:17:11.352290Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2024-11-21T23:17:11.352318Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2024-11-21T23:17:11.352354Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T23:17:11.352373Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:11.352393Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T23:17:11.352414Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T23:17:11.352495Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T23:17:11.352519Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T23:17:11.352547Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T23:17:11.352580Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T23:17:11.352613Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T23:17:11.352632Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T23:17:11.352653Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T23:17:11.352672Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:11.352784Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2024-11-21T23:17:11.352805Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T23:17:11.352834Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:17:11.352863Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:17:11.352885Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2024-11-21T23:17:11.352916Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:17:11.352939Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2024-11-21T23:17:11.352970Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:11.353009Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:11.353037Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T23:17:11.353067Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T23:17:11.364303Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715665} 2024-11-21T23:17:11.364475Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T23:17:11.364629Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:11.364753Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:17:11.364895Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1030:2831], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:11.365014Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:11.365212Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715665} 2024-11-21T23:17:11.365254Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T23:17:11.365303Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:17:11.365333Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:11.365376Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1030:2831], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:11.365412Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:11.368286Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:557:2484], Recipient [13:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T23:17:11.368431Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:17:11.368547Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2024-11-21T23:17:11.368701Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T23:17:11.368765Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:17:11.368828Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:11.368878Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:11.368916Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2024-11-21T23:17:11.368965Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
2024-11-21T23:17:11.368986Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:11.369008Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:17:11.369027Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:11.369149Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW } 2024-11-21T23:17:11.369549Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665 2024-11-21T23:17:11.369616Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:557:2484], 1} after executionsCount# 1 2024-11-21T23:17:11.369688Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:557:2484], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:11.369900Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:557:2484], 1} finished in read 2024-11-21T23:17:11.369975Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T23:17:11.369996Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:11.370017Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:11.370050Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:11.370096Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T23:17:11.370114Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:11.370146Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2024-11-21T23:17:11.370210Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:17:11.370370Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2024-11-21T23:15:28.627978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875188852304007:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.628117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:28.942501Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000edd/r3tmp/tmpq9y1w9/pdisk_1.dat 2024-11-21T23:15:29.302949Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:29.349507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2024-11-21T23:15:29.349586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.367733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8623, node 1 2024-11-21T23:15:29.511045Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/000edd/r3tmp/yandex54UYNO.tmp 2024-11-21T23:15:29.511083Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/000edd/r3tmp/yandex54UYNO.tmp 2024-11-21T23:15:29.511290Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/000edd/r3tmp/yandex54UYNO.tmp 2024-11-21T23:15:29.511416Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.572469Z INFO: TTestServer started on Port 14373 GrpcPort 8623 TClient is connected to server localhost:14373 PQClient connected to localhost:8623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:29.991365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:30.062753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:30.291486Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:30.307028Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:15:32.533910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875206032173825:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.534752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875206032173798:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.535819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.539875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:15:32.582836Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2024-11-21T23:15:32.586647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875206032173835:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:32.999313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.039097Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875206032173909:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:33.058185Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWNlNzI2YmItMjVmNzZkNi04MDg1YjgwZC1jMWUzYmFhNg==, ActorId: [1:7439875206032173795:2307], ActorState: ExecuteState, TraceId: 01jd8g4t0c1f8f0nfwhzxb5tdv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:33.065807Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:33.097158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.260685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T23:15:33.630653Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875188852304007:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.630731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7439875210327141485:2624] === CheckClustersList. 
Ok 2024-11-21T23:15:38.734492Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T23:15:38.789210Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:15:38.790608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439875231801978217:2766], Recipient [1:7439875193147271558:2195]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:38.790644Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:38.790659Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T23:15:38.790696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439875231801978213:2763], Recipient [1:7439875193147271558:2195]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2024-11-21T23:15:38.790709Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:15:38.881040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:15:38.881493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:15:38.881792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T23:15:38.881833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T23:15:38.881865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T23:15:38.881906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T23:15:38.881950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T23:15:38.882084Z node 1 :FLAT_TX_ ... 336493938:2770]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:11.920426Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7439875586681786946:2430], Recipient [5:7439875586681787003:2433]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:11.920450Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:11.920452Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:11.920506Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7439875625336493853:2758], Partition 2, Sender [5:7439875625336493853:2758], Recipient [5:7439875625336493936:2768], Cookie: 0 2024-11-21T23:17:11.920519Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7439875625336493938:2770], Recipient [5:7439875625336493854:2759]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:11.920543Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:11.920549Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7439875625336493853:2758], Recipient [5:7439875625336493936:2768]: NKikimr::TEvPQ::TEvPartitionStatus 2024-11-21T23:17:11.920567Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2024-11-21T23:17:11.920612Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7439875586681787003:2433], Recipient [5:7439875586681786946:2430]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:11.920635Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:11.920770Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2024-11-21T23:17:11.920877Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7439875625336493936:2768], Recipient [5:7439875625336493853:2758]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:11.920892Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:11.920906Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7439875625336493853:2758], Partition 2, Sender [5:7439875625336493853:2758], Recipient [5:7439875625336493936:2768], Cookie: 0 2024-11-21T23:17:11.920945Z node 5 :PERSQUEUE 
TRACE: StateIdle, received event# 271188536, Sender [5:7439875625336493853:2758], Recipient [5:7439875625336493936:2768]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:11.920957Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:11.921292Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 10 DataSize: 0 UsedReserveSize: 0 2024-11-21T23:17:11.921433Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. PendingUpdates size 3 2024-11-21T23:17:11.921636Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [5:7439875586681786941:2429], Recipient [5:7439875535142178272:2142]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 10 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2024-11-21T23:17:11.921673Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2024-11-21T23:17:11.921694Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2024-11-21T23:17:11.921722Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099995s, queue# 1 2024-11-21T23:17:11.925758Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [5:7439875586681786941:2429], Recipient [5:7439875535142178272:2142]: NKikimrSchemeOp.TDescribePath PathId: 13 SchemeshardId: 72057594046644480 2024-11-21T23:17:11.925800Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2024-11-21T23:17:11.980672Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [5:7439875535142178272:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T23:17:11.980725Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T23:17:11.980740Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T23:17:11.980786Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T23:17:11.980800Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2024-11-21T23:17:11.980861Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 15 shard idx 72057594046644480:7 data size 808 row count 3 2024-11-21T23:17:11.980904Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], pathId map=migrations, is column=0, is olap=0 2024-11-21T23:17:11.980918Z node 5 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 15: RowCount 3, DataSize 808 2024-11-21T23:17:11.980931Z node 5 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2024-11-21T23:17:11.980972Z node 5 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 3, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2024-11-21T23:17:11.981044Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 
72057594046644480 2024-11-21T23:17:11.985260Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435088, Sender [0:0:0], Recipient [5:7439875535142178272:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2024-11-21T23:17:11.985312Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2024-11-21T23:17:11.985336Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2024-11-21T23:17:12.014819Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875625336493853:2758], Partition 2, Sender [0:0:0], Recipient [5:7439875625336493936:2768], Cookie: 0 2024-11-21T23:17:12.014898Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875625336493936:2768]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:12.014924Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:12.014972Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:12.015048Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:12.015071Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:12.015100Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:12.015159Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875586681786946:2430], Partition 0, Sender [0:0:0], Recipient [5:7439875586681787003:2433], Cookie: 0 2024-11-21T23:17:12.015191Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875586681787003:2433]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:12.015203Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:12.015225Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:12.015269Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:12.015283Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:12.015300Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2024-11-21T23:17:12.015337Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875625336493854:2759], Partition 1, Sender [0:0:0], Recipient [5:7439875625336493938:2770], Cookie: 0 2024-11-21T23:17:12.015366Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875625336493938:2770]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:12.015379Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:12.015400Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:12.015427Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:12.015441Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:12.015470Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:12.021893Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7439875535142178272:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2024-11-21T23:17:12.021956Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2024-11-21T23:17:12.021980Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T23:17:12.021991Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T23:17:12.022051Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2024-11-21T23:17:12.022374Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7439875535142178272:2142]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2024-11-21T23:17:12.022410Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2024-11-21T23:17:12.022420Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] Test command err: 2024-11-21T23:16:26.305415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:26.305833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:16:26.305887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 10409, node 1 TClient is connected to server localhost:9242 2024-11-21T23:16:34.856370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:34.856746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:34.856865Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6315, node 2 TClient is connected to server localhost:21947 2024-11-21T23:16:44.430277Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:44.430445Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:44.430517Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 7697, node 3 TClient is connected to server localhost:10761 2024-11-21T23:16:56.603563Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:89:2135], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:56.604015Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:16:56.604237Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 2084, node 4 TClient is connected to server localhost:16211 2024-11-21T23:17:10.107934Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:10.108109Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:10.170047Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 25779, node 5 TClient is connected to server localhost:28069 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 32431, MsgBus: 32514 2024-11-21T23:16:44.577650Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875512003164171:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:44.579882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001410/r3tmp/tmpiOE8vZ/pdisk_1.dat 2024-11-21T23:16:45.168124Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:45.178217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:45.178322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:45.186075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32431, node 1 2024-11-21T23:16:45.381059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:45.381092Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:45.381100Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:45.381368Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32514 TClient is connected to server localhost:32514 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:16:46.216357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:46.256307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:46.456630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:46.666486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:46.768490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:48.689164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875529183034910:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:48.689268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:49.028754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.077904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.138491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.186210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.238340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.356602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:49.435658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875533478002715:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:49.439844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:49.459382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875533478002717:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:16:49.550803Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875512003164171:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:49.550916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:50.985219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 18791, MsgBus: 25190 2024-11-21T23:16:57.496069Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875571369421247:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001410/r3tmp/tmpvpL5Fi/pdisk_1.dat 2024-11-21T23:16:57.662239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:16:57.882672Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:57.886087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:57.886189Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:57.900399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18791, node 2 2024-11-21T23:16:58.091075Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:58.091110Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:58.091119Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:58.091251Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25190 TClient is connected to server localhost:25190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:59.491041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:16:59.521752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:59.653165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:59.953277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:00.069826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:02.405040Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875571369421247:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:02.405125Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:03.753204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875597139226577:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:03.753365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:03.813100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:03.877200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:03.938673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.001106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.074137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.152641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:04.233898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875601434194375:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:04.234017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:04.234245Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875601434194381:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:04.241228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:04.251760Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875601434194383:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:05.676269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |86.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] Test command err: Trying to start YDB, gRPC: 8289, MsgBus: 31629 2024-11-21T23:16:46.949580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875523992105985:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:46.949860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00140b/r3tmp/tmpa0haa0/pdisk_1.dat 2024-11-21T23:16:47.491930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:47.492131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:47.494796Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:47.496342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8289, node 1 2024-11-21T23:16:47.659036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:47.659064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:47.659072Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:47.659158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31629 TClient is connected to server localhost:31629 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:16:48.461387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:48.482149Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:16:48.499857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:48.699155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:48.889368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:48.998504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:16:51.952586Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875523992105985:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:16:51.952651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:16:51.984483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875545466944024:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:51.984646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.355738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.412529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.448146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.505255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.565109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.638022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:16:52.780068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875549761911826:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.780163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.780602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875549761911832:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:52.785051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:16:52.807996Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:16:52.808673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875549761911834:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:16:54.685388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:16:57.095404Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g7bkk2sva8te08xe4hr02, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTUyYzg1NmYtZWI0ZmE0Y2ItMWQ4YzI2YjQtNmViYjFkZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:16:57.111383Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTUyYzg1NmYtZWI0ZmE0Y2ItMWQ4YzI2YjQtNmViYjFkZWM=, ActorId: [1:7439875558351846767:2466], ActorState: ExecuteState, TraceId: 01jd8g7bkk2sva8te08xe4hr02, Create QueryResponse for error on request, msg: 2024-11-21T23:16:57.664312Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439875571236749249:2552], TxId: 281474976710679, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTUyYzg1NmYtZWI0ZmE0Y2ItMWQ4YzI2YjQtNmViYjFkZWM=. TraceId : 01jd8g7cn76bp4wf989c1nvwdt. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T23:16:57.665598Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439875571236749250:2553], TxId: 281474976710679, task: 2. Ctx: { TraceId : 01jd8g7cn76bp4wf989c1nvwdt. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTUyYzg1NmYtZWI0ZmE0Y2ItMWQ4YzI2YjQtNmViYjFkZWM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439875571236749246:2466], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T23:16:57.667124Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTUyYzg1NmYtZWI0ZmE0Y2ItMWQ4YzI2YjQtNmViYjFkZWM=, ActorId: [1:7439875558351846767:2466], ActorState: ExecuteState, TraceId: 01jd8g7cn76bp4wf989c1nvwdt, Create QueryResponse for error on request, msg: 2024-11-21T23:16:59.154665Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g7d6vcqd4tmt0wzy7q4j7, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTUyYzg1NmYtZWI0ZmE0Y2ItMWQ4YzI2YjQtNmViYjFkZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:16:59.154940Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTUyYzg1NmYtZWI0ZmE0Y2ItMWQ4YzI2YjQtNmViYjFkZWM=, ActorId: [1:7439875558351846767:2466], ActorState: ExecuteState, TraceId: 01jd8g7d6vcqd4tmt0wzy7q4j7, Create QueryResponse for error on request, msg: 2024-11-21T23:16:59.262958Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:16:59.291672Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:17:00.704684Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:17:00.728751Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 27105, MsgBus: 28286 2024-11-21T23:17:01.789349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875586354249698:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:01.789516Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00140b/r3tmp/tmp4t9SUw/pdisk_1.dat 2024-11-21T23:17:02.145478Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:02.209305Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:02.209414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:02.211007Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27105, node 2 2024-11-21T23:17:02.442876Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:02.442900Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:02.442910Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:02.443027Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28286 TClient is connected to server localhost:28286 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:03.259792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:03.267887Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:03.281968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:03.419081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:03.727375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:03.910038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:06.785617Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875586354249698:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:06.785705Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:07.328380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875612124055023:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:07.328516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:07.387407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2024-11-21T23:17:07.437467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2024-11-21T23:17:07.518253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2024-11-21T23:17:07.605067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2024-11-21T23:17:07.642075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2024-11-21T23:17:07.734991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2024-11-21T23:17:07.822204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875612124055535:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:07.822307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:07.822716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875612124055540:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:07.827300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2024-11-21T23:17:07.862005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875612124055542:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:08.960807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:10.777239Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g7s520w4ybtjw3mcpvmen, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ1MzM1ZDQtN2ExZDRkMzYtNDc5OTMzZDgtNDE0MzQxMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:17:10.777539Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGQ1MzM1ZDQtN2ExZDRkMzYtNDc5OTMzZDgtNDE0MzQxMDM=, ActorId: [2:7439875616419023163:2465], ActorState: ExecuteState, TraceId: 01jd8g7s520w4ybtjw3mcpvmen, Create QueryResponse for error on request, msg: 2024-11-21T23:17:11.269690Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439875629303925620:2547], TxId: 281474976710679, task: 1. Ctx: { TraceId : 01jd8g7t0bby69gskkq3rgegyb. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGQ1MzM1ZDQtN2ExZDRkMzYtNDc5OTMzZDgtNDE0MzQxMDM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2024-11-21T23:17:11.270833Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439875629303925621:2548], TxId: 281474976710679, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd8g7t0bby69gskkq3rgegyb. SessionId : ydb://session/3?node_id=2&id=OGQ1MzM1ZDQtN2ExZDRkMzYtNDc5OTMzZDgtNDE0MzQxMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7439875629303925617:2465], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T23:17:11.271911Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGQ1MzM1ZDQtN2ExZDRkMzYtNDc5OTMzZDgtNDE0MzQxMDM=, ActorId: [2:7439875616419023163:2465], ActorState: ExecuteState, TraceId: 01jd8g7t0bby69gskkq3rgegyb, Create QueryResponse for error on request, msg: 2024-11-21T23:17:12.453896Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8g7tfs56vtf60svqe5asxq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGQ1MzM1ZDQtN2ExZDRkMzYtNDc5OTMzZDgtNDE0MzQxMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2024-11-21T23:17:12.454211Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGQ1MzM1ZDQtN2ExZDRkMzYtNDc5OTMzZDgtNDE0MzQxMDM=, ActorId: [2:7439875616419023163:2465], ActorState: ExecuteState, TraceId: 01jd8g7tfs56vtf60svqe5asxq, Create QueryResponse for error on request, msg: 2024-11-21T23:17:12.520471Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:17:12.622783Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:17:14.033134Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2024-11-21T23:17:14.057462Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TStorageServiceTest::ShouldGetState [GOOD] |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> Viewer::JsonAutocompleteSchemePOST [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] Test command err: 2024-11-21T23:15:33.586345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:33.589890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:33.590078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002b19/r3tmp/tmppDz35F/pdisk_1.dat 2024-11-21T23:15:34.003685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:34.058982Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:34.112794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:34.112970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:34.124742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:34.243672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:34.296018Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:34.297332Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:34.297895Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:34.298205Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:34.361211Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:34.362056Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:34.362213Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:34.365060Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:34.365149Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:34.365212Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:34.365607Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:34.397307Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:34.397552Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:34.397666Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:34.397704Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:34.397738Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:34.397768Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.398207Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.398264Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.398733Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:34.398824Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:34.398948Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.398984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.399025Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:34.399072Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.399124Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:34.399199Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.399236Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:34.399266Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:34.399294Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:34.399332Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:34.399440Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:34.399495Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:34.399589Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:34.399791Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:34.399842Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:34.399931Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:34.400000Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:34.400033Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:34.400071Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:34.400115Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.400333Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:34.400365Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:34.400394Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:34.400418Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.400465Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:34.400488Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:34.400514Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:34.400557Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.400579Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:34.401724Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:34.401768Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:34.412523Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:34.412607Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.412715Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.412787Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:34.412870Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:34.599163Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.599234Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.599289Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:34.599435Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:34.599496Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:34.599701Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.599769Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:34.599810Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:34.599854Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:34.604350Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:34.604438Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.604934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.604972Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.605011Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.605049Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:34.605084Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.605133Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... leId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\002\000\004\000\000\000\002\000\000\000\004\000\000\000\000\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "index" Type: 1 } MaxValueSizeBytes: 4 } Columns { Column { Id: 3 Name: "value" Type: 1 } MaxValueSizeBytes: 4 } } 2024-11-21T23:17:14.715332Z node 13 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Int32 : 2, Int32 : 0) 2024-11-21T23:17:14.715423Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 2, Int32 : 0) table: [72057594046644480:2:1] 2024-11-21T23:17:14.715624Z node 13 :TX_DATASHARD TRACE: -- AddReadRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T23:17:14.715739Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T23:17:14.716136Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2024-11-21T23:17:14.716238Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:17:14.716302Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T23:17:14.716359Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:14.716411Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:14.716462Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2024-11-21T23:17:14.716576Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2024-11-21T23:17:14.716638Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:17:14.716663Z node 13 :TX_DATASHARD TRACE: Advance execution 
plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:14.716686Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T23:17:14.716710Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T23:17:14.716772Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/0 ImmediateWriteEdgeReplied# v3501/0 2024-11-21T23:17:14.716845Z node 13 :TX_DATASHARD TRACE: TSysLocks::GetLock: lock 281474976715663 not found 2024-11-21T23:17:14.716900Z node 13 :TX_DATASHARD TRACE: ValidateLocks: broken lock 281474976715663 expected 2:5 found 0:0 2024-11-21T23:17:14.717042Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 Counter: 5 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2024-11-21T23:17:14.717146Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:17:14.717173Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T23:17:14.717199Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:17:14.717223Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T23:17:14.717333Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:17:14.717376Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:17:14.717435Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:14.717486Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:14.717539Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2024-11-21T23:17:14.717563Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:14.717596Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2024-11-21T23:17:14.718278Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:17:14.718352Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2024-11-21T23:17:14.718449Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2024-11-21T23:17:14.718569Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:14.719394Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MWI1ZjU3ZmYtODQxOWM0NDctNWRhZjNjNjYtZjhkNzNjMzI=, ActorId: [13:912:2730], ActorState: ExecuteState, TraceId: 01jd8g7xq31tf2h4rj6f3cjfn1, Create QueryResponse for error on request, msg: 2024-11-21T23:17:14.720386Z node 13 
:KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd8g7xq31tf2h4rj6f3cjfn1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWI1ZjU3ZmYtODQxOWM0NDctNWRhZjNjNjYtZjhkNzNjMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:14.720679Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:964:2730], Recipient [13:837:2672]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 964 RawX2: 55834577578 } TxBody: " \0018\001j3\010\001\032\'\n#\t\217\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\002 \005)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2024-11-21T23:17:14.720716Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:17:14.720826Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:837:2672], Recipient [13:837:2672]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:17:14.720861Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2024-11-21T23:17:14.720908Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:17:14.721055Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715663, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2024-11-21T23:17:14.721143Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2024-11-21T23:17:14.721188Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T23:17:14.721216Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2024-11-21T23:17:14.721241Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:14.721266Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:14.721307Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3500/18446744073709551615 ImmediateWriteEdge# v3501/18446744073709551615 ImmediateWriteEdgeReplied# v3501/18446744073709551615 2024-11-21T23:17:14.721351Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2024-11-21T23:17:14.721381Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T23:17:14.721406Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:14.721429Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2024-11-21T23:17:14.721453Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2024-11-21T23:17:14.721515Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193454 2024-11-21T23:17:14.721644Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715663 DataShard: 72075186224037888 Generation: 2 
Counter: 5 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2024-11-21T23:17:14.721732Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T23:17:14.721798Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T23:17:14.721825Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2024-11-21T23:17:14.721847Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:17:14.721872Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2024-11-21T23:17:14.721923Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2024-11-21T23:17:14.722025Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2024-11-21T23:17:14.722067Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:17:14.722125Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:14.722175Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:14.722213Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2024-11-21T23:17:14.722239Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:14.722263Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2024-11-21T23:17:14.722320Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:17:14.722378Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2024-11-21T23:17:14.722473Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:14.723770Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:54:2101], Recipient [13:837:2672]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 13 Status: STATUS_NOT_FOUND ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2024-11-21T23:15:31.837660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:31.841001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:31.841163Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c46/r3tmp/tmpM2eZ8M/pdisk_1.dat 2024-11-21T23:15:32.279703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.355622Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:32.416312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:32.416456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:32.428190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:32.557103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.597883Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:32.599746Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:32.600299Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:32.600613Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:32.642998Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:32.643824Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:32.643908Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:32.645465Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:32.645537Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:32.645585Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:32.645895Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:32.682066Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:32.682307Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:32.682450Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:32.682492Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:32.682527Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:32.682567Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:32.683042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:32.683113Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:32.683646Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:32.683790Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:32.683920Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:32.683953Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:32.683994Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:32.684047Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:32.684112Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:32.684175Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:32.684216Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:32.684253Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:32.684280Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:32.684321Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:32.684481Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:32.684521Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:32.684635Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:32.684910Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:32.684970Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:32.685079Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:32.685147Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:32.685183Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:32.685221Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:32.685278Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:32.685575Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:32.685607Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:32.685640Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:32.685670Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:32.685730Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:32.685758Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:32.685790Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:32.685842Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:32.685867Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:32.687324Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:32.687384Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:32.698244Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:32.698333Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:32.698453Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:32.698507Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:32.698577Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:32.913021Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:32.913092Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:32.913144Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:32.913262Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:32.913295Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:32.913428Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:32.913504Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:32.913552Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:32.913598Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:32.928949Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:32.929068Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:32.929757Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:32.929804Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:32.929859Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:32.929905Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:32.929942Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:32.929988Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:17:14.696056Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:17:14.696125Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:17:14.696401Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:14.696439Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:14.696486Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:17:14.696518Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:14.696550Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:14.696584Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2024-11-21T23:17:14.696615Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2024-11-21T23:17:14.696648Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:14.696679Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2024-11-21T23:17:14.696708Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T23:17:14.696736Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2024-11-21T23:17:14.696888Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2024-11-21T23:17:14.696933Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:14.696957Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T23:17:14.697004Z node 14 :TX_DATASHARD TRACE: Add 
[3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:14.697030Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T23:17:14.697069Z node 14 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2024-11-21T23:17:14.697104Z node 14 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2024-11-21T23:17:14.697136Z node 14 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2024-11-21T23:17:14.697177Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:14.697202Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:14.697229Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T23:17:14.697255Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T23:17:14.697353Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T23:17:14.697382Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T23:17:14.697418Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T23:17:14.697453Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T23:17:14.697477Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:14.697501Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T23:17:14.697540Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T23:17:14.697567Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:14.697700Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2024-11-21T23:17:14.697729Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T23:17:14.697765Z node 14 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:17:14.697798Z node 14 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:17:14.697831Z node 14 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:14.697854Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:17:14.697880Z node 14 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2024-11-21T23:17:14.697916Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 
72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:14.697948Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:14.697983Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T23:17:14.698015Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T23:17:14.719823Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3000 txid# 281474976715664} 2024-11-21T23:17:14.719972Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2024-11-21T23:17:14.720098Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:14.720172Z node 14 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:17:14.720280Z node 14 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [14:999:2800], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:14.720372Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:14.720801Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3000 txid# 281474976715664} 2024-11-21T23:17:14.720843Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2024-11-21T23:17:14.720885Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:17:14.720918Z node 14 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:14.720961Z node 14 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [14:999:2800], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:14.721027Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:14.722859Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2024-11-21T23:17:14.723041Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:17:14.723151Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T23:17:14.723321Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:14.723387Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:17:14.723453Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:14.723522Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:14.723566Z node 14 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T23:17:14.723631Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2024-11-21T23:17:14.723660Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:14.723684Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:17:14.723711Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:14.723873Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2024-11-21T23:17:14.724377Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2024-11-21T23:17:14.724458Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 1} after executionsCount# 1 2024-11-21T23:17:14.724566Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:14.724900Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} finished in read 2024-11-21T23:17:14.725007Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:14.725037Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:14.725067Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:14.725098Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:14.725153Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:14.725178Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:14.725218Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2024-11-21T23:17:14.725277Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:17:14.725476Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite >> LabeledDbCounters::OneTablet [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters >> TestKinesisHttpProxy::GoodRequestPutRecords >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> TestYmqHttpProxy::TestSendMessage >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TopicAutoscaling::CDC_Write [FAIL] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TestYmqHttpProxy::TestCreateQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 
2024-11-21T23:16:27.820576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2024-11-21T23:16:27.821067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2024-11-21T23:16:27.821119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 1039, node 1
TClient is connected to server localhost:10659
2024-11-21T23:16:36.417663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2024-11-21T23:16:36.417956Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2024-11-21T23:16:36.418085Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 18760, node 2
TClient is connected to server localhost:23317
2024-11-21T23:16:46.004528Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2024-11-21T23:16:46.004705Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2024-11-21T23:16:46.004777Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 30347, node 3
TClient is connected to server localhost:11366
2024-11-21T23:16:59.604161Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:89:2135], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2024-11-21T23:16:59.604643Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2024-11-21T23:16:59.604864Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # SectorMap:test-client[:2000]
TServer::EnableGrpc on GrpcPort 25613, node 4
TClient is connected to server localhost:13447
2024-11-21T23:17:14.135276Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:14.135541Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:14.135715Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 22126, node 5 TClient is connected to server localhost:2868 >> TCheckpointStorageTest::ShouldMarkCheckpointsGc [FAIL] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints >> TCheckpointStorageTest::ShouldCreateGetCheckpoints [FAIL] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [FAIL] >> TCheckpointStorageTest::ShouldDeleteGraph >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [FAIL] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull >> TCheckpointStorageTest::ShouldDeleteGraph [FAIL] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [FAIL] >> TestKinesisHttpProxy::MissingAction >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [FAIL] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc >> TUserAccountServiceTest::Get >> TAccessServiceTest::Authenticate >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD] Test command err: 2024-11-21T23:16:47.279920Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7439875522522566124:2048] with connection to localhost:65033:local 2024-11-21T23:16:47.280014Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:47.714684Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:47.714717Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:47.719293Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:49.558831Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:49.558874Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:51.857544Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7439875537329286484:2048] with connection to localhost:65033:local 2024-11-21T23:16:51.857644Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:16:52.474349Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:16:52.474379Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:16:52.489123Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:16:55.297487Z node 2 
:STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:16:55.297525Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:16:55.297901Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T23:16:56.986720Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2024-11-21T23:16:56.986760Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T23:16:56.990712Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2024-11-21T23:16:59.114694Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2024-11-21T23:16:59.114729Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2024-11-21T23:16:59.121381Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:16:59.647251Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:17:01.334701Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7439875584169826305:2048] with connection to localhost:65033:local 2024-11-21T23:17:01.334798Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:17:01.933932Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:17:01.933987Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:17:01.934505Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:17:05.193500Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:17:05.193540Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:17:05.195675Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:17:06.456361Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2024-11-21T23:17:06.456408Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:17:06.464829Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2024-11-21T23:17:08.084527Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2024-11-21T23:17:08.084568Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2024-11-21T23:17:08.089628Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2024-11-21T23:17:08.662500Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2024-11-21T23:17:08.662549Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2024-11-21T23:17:08.666857Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2024-11-21T23:17:09.217564Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2024-11-21T23:17:09.217607Z node 3 
:STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2024-11-21T23:17:09.218420Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2024-11-21T23:17:09.793909Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted 2024-11-21T23:17:09.793950Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2024-11-21T23:17:09.794261Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest 2024-11-21T23:17:10.226569Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted 2024-11-21T23:17:10.226606Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse 2024-11-21T23:17:10.233124Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2024-11-21T23:17:10.745684Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2024-11-21T23:17:12.603801Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7439875630582778110:2048] with connection to localhost:65033:local 2024-11-21T23:17:12.603879Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2024-11-21T23:17:12.991391Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2024-11-21T23:17:12.991428Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2024-11-21T23:17:12.993182Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2024-11-21T23:17:14.609716Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2024-11-21T23:17:14.609756Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2024-11-21T23:17:14.610341Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2024-11-21T23:17:14.801784Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2024-11-21T23:17:14.801846Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2024-11-21T23:17:14.802275Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317} 2024-11-21T23:17:14.802372Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317 2024-11-21T23:17:15.628414Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results: 2024-11-21T23:17:15.628551Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1 2024-11-21T23:17:15.628588Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints 2024-11-21T23:17:15.731732Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0 2024-11-21T23:17:16.244318Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49 2024-11-21T23:17:16.244414Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements 2024-11-21T23:17:16.270174Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317} >> TStorageTenantTest::CreateTableInsideSubDomain2 >> 
DataShardReadIterator::ShouldReadRangeChunk100 [GOOD]
>> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict [GOOD]
>> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD]
>> CdcStreamChangeCollector::PageFaults
>> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD]
Test command err:
2024-11-21T23:15:33.303569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:33.309824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:33.310104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002b26/r3tmp/tmpg07A4I/pdisk_1.dat 2024-11-21T23:15:33.807477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.877955Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:33.935357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:33.935560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:33.947157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:34.068415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:34.106767Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:34.107888Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:34.108347Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:34.108612Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:34.163545Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:34.164387Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:34.164496Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:34.166239Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:34.166322Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:34.166384Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:34.167386Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:34.213119Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:34.213330Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:34.213437Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:34.213479Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:34.213518Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:34.213552Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.214029Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.214097Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.214654Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:34.214757Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:34.214882Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.214917Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.214957Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:34.215007Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.215074Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:34.215131Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.215169Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:34.215211Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:34.215247Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:34.215294Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:34.215435Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:34.215498Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:34.215610Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:34.215835Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:34.215884Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:34.216018Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:34.216075Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:34.216114Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:34.216154Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:34.216213Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.216488Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:34.216534Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:34.216569Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:34.216602Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.216656Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:34.216688Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:34.216717Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:34.216769Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.216795Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:34.218246Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:34.218298Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:34.231077Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:34.231163Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.231266Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.231321Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:34.231403Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:34.439281Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.439348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.439390Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:34.439538Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:34.439578Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:34.439713Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.439776Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:34.439820Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:34.439865Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:34.445265Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:34.445349Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.445924Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.445970Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.446020Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.446066Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:34.446101Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.446145Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... :2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.451829Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.452221Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709543002, quota bytes left# 18446744073709138191, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.452371Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.452415Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.452460Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.452885Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542903, quota bytes left# 18446744073709133439, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.453057Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.453101Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.453143Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.453548Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542804, quota bytes left# 18446744073709128687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.453675Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.453717Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.453761Z node 14 
:TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.454144Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542705, quota bytes left# 18446744073709123935, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.454314Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.454356Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.454462Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.454865Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542606, quota bytes left# 18446744073709119183, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.454989Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.455034Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.455075Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.455483Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542507, quota bytes left# 18446744073709114431, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.455648Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.455689Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.455732Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.456140Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542408, quota bytes left# 18446744073709109679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.456269Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.456310Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.456351Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.456761Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542309, quota bytes left# 18446744073709104927, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.456927Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], 
Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.456971Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.457017Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.457399Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542210, quota bytes left# 18446744073709100175, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.457535Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.457583Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.457625Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.458008Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542111, quota bytes left# 18446744073709095423, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.458180Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.458223Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.458268Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.458710Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709542012, quota bytes left# 18446744073709090671, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.458838Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.458881Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.458923Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.459309Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541913, quota bytes left# 18446744073709085919, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.459494Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.459539Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.459579Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.459962Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota 
rows left# 18446744073709541814, quota bytes left# 18446744073709081167, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.460109Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.460149Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.460195Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.460560Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541715, quota bytes left# 18446744073709076415, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.460709Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.460753Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.460794Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.461201Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 99, bytes# 4752, quota rows left# 18446744073709541616, quota bytes left# 18446744073709071663, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.461331Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:939:2753], Recipient [14:939:2753]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.461375Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 0 2024-11-21T23:17:21.461417Z node 14 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 0 2024-11-21T23:17:21.461533Z node 14 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[14:557:2484], 1} sends rowCount# 1, bytes# 48, quota rows left# 18446744073709541615, quota bytes left# 18446744073709071615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:21.461611Z node 14 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[14:557:2484], 1} finished in ReadContinue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict [GOOD] Test command err: 2024-11-21T23:15:34.580981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:34.590616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:34.590858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002b07/r3tmp/tmpEspJCl/pdisk_1.dat 2024-11-21T23:15:34.996808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:35.048848Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:35.108350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:35.108549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:35.120771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:35.238812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:35.291888Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:35.293050Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:35.293529Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:35.293804Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:35.354230Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:35.355284Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:35.355438Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:35.357305Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:35.357391Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:35.367609Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:35.368117Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:35.408515Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:35.408751Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:35.408898Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:35.408944Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:35.408986Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:35.409023Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:35.409509Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:35.409580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:35.410520Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:35.410657Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:35.410842Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:35.410888Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:35.410935Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:35.411010Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:35.411077Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:35.411151Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:35.411205Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:35.411248Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:35.411290Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:35.411343Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:35.411513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:35.411556Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:35.411672Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:35.411905Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:35.411968Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:35.412086Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:35.412154Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:35.412198Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:35.412239Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:35.412288Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:35.412574Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:35.412638Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:35.412686Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:35.412720Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:35.412780Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:35.412813Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:35.412847Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:35.412901Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:35.412937Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:35.414522Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:35.414586Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:35.426427Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:35.426515Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:35.426628Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:35.426706Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:35.426806Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:35.636863Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:35.636940Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:35.636982Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:35.637095Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:35.637164Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:35.637300Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:35.637366Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:35.637410Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:35.637453Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:35.648898Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:35.649034Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:35.649595Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:35.649636Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:35.649692Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:35.649741Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:35.649794Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:35.649847Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3001 txid# 281474976715667} 2024-11-21T23:17:21.672103Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3001} 2024-11-21T23:17:21.672163Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:21.672198Z node 14 :TX_DATASHARD TRACE: Complete execution for [3001:281474976715667] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:17:21.672256Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:21.673268Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [14:1122:2896], Recipient [14:630:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:17:21.673316Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:17:21.673357Z node 14 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [14:1121:2895], serverId# [14:1122:2896], sessionId# [0:0:0] 2024-11-21T23:17:21.674235Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2024-11-21T23:17:21.674413Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:17:21.674509Z node 14 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2024-11-21T23:17:21.674585Z node 14 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2024-11-21T23:17:21.674701Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T23:17:21.674888Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:21.674961Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit 
CheckRead 2024-11-21T23:17:21.675028Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:21.675094Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:21.675172Z node 14 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T23:17:21.675231Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:21.675263Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:21.675288Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:17:21.675318Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:21.675485Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2024-11-21T23:17:21.676012Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2024-11-21T23:17:21.676063Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2024-11-21T23:17:21.676159Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T23:17:21.715213Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:1019:2817], Recipient [14:630:2536]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T23:17:21.715342Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2024-11-21T23:17:21.715427Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2024-11-21T23:17:21.715594Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2024-11-21T23:17:21.715844Z node 14 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1111:2871], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:21.715953Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:17:21.716033Z node 14 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2024-11-21T23:17:21.716420Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. Ctx: { TraceId: 01jd8g84d557t3nakhj2wyx9w0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=MTQxOTMzMzMtYjM2YmQwNTctMzkyYTVjYWUtNDcyMTYzN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2024-11-21T23:17:21.716626Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. 
Ctx: { TraceId: 01jd8g84d557t3nakhj2wyx9w0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=MTQxOTMzMzMtYjM2YmQwNTctMzkyYTVjYWUtNDcyMTYzN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:17:21.716747Z node 14 :KQP_EXECUTER DEBUG: ActorId: [14:1111:2871] TxId: 281474976715667. Ctx: { TraceId: 01jd8g84d557t3nakhj2wyx9w0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=MTQxOTMzMzMtYjM2YmQwNTctMzkyYTVjYWUtNDcyMTYzN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2024-11-21T23:17:21.716910Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:21.716988Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:21.718056Z node 14 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [14:557:2484], selfId: [14:50:2097], source: [14:1089:2871] 2024-11-21T23:17:21.718259Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:21.718898Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:21.718982Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:17:21.719046Z node 14 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2024-11-21T23:17:21.719111Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:21.719322Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2024-11-21T23:17:21.719887Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T23:17:21.719974Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 1} after executionsCount# 2 2024-11-21T23:17:21.720058Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2024-11-21T23:17:21.720352Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:21.720426Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:21.720492Z node 14 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:21.720556Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:21.720613Z node 14 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:21.720642Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:21.720678Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:4] at 
72075186224037888 has finished 2024-11-21T23:17:21.720744Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:21.720804Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:17:21.720869Z node 14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:17:21.720938Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:17:21.721258Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.721337Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 2 2024-11-21T23:17:21.721597Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 2 2024-11-21T23:17:21.721789Z node 14 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2024-11-21T23:17:21.722885Z node 14 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=14&id=MTQxOTMzMzMtYjM2YmQwNTctMzkyYTVjYWUtNDcyMTYzN2U=, workerId: [14:1089:2871], local sessions count: 0 2024-11-21T23:17:21.723037Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [14:630:2536], Recipient [14:630:2536]: NKikimr::TEvDataShard::TEvReadContinue 2024-11-21T23:17:21.723092Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[14:557:2484], 1}, firstUnprocessedQuery# 4 2024-11-21T23:17:21.723319Z node 14 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[14:557:2484], 1}, FirstUnprocessedQuery# 4 2024-11-21T23:17:21.723453Z node 14 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[14:557:2484], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2024-11-21T23:17:21.723616Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[14:557:2484], 1} finished in ReadContinue 2024-11-21T23:17:21.723885Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [14:54:2101], Recipient [14:1019:2817]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 14 Status: STATUS_NOT_FOUND >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TUserAccountServiceTest::Get [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> TAccessServiceTest::Authenticate [GOOD] >> TSubDomainTest::GenericCases [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns >> TestYmqHttpProxy::TestReceiveMessage >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2024-11-21T23:17:21.366068Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875670482574783:2137];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:21.366680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019fa/r3tmp/tmp4CQIbW/pdisk_1.dat 2024-11-21T23:17:21.707338Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:21.767417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:21.767660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:21.769590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:22.019389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:22.031162Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2024-11-21T23:17:21.498546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875673685620773:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:21.500761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001a12/r3tmp/tmpv1n2PJ/pdisk_1.dat 2024-11-21T23:17:21.908132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:21.908244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:21.910257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:21.928467Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24135 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:22.204536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:22.217211Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:22.241047Z node 1 :GRPC_CLIENT DEBUG: [51600008fad0] Connect to grpc://localhost:29960 2024-11-21T23:17:22.243628Z node 1 :GRPC_CLIENT DEBUG: [51600008fad0] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2024-11-21T23:17:22.256812Z node 1 :GRPC_CLIENT DEBUG: [51600008fad0] Status 7 Permission Denied 2024-11-21T23:17:22.262011Z node 1 :GRPC_CLIENT DEBUG: [51600008fad0] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2024-11-21T23:17:22.265702Z node 1 :GRPC_CLIENT DEBUG: [51600008fad0] Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TestKinesisHttpProxy::TestRequestWithIAM >> TStorageTenantTest::CreateSolomonInsideSubDomain >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> TestYmqHttpProxy::TestGetQueueUrl >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> TestKinesisHttpProxy::DifferentContentTypes >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2024-11-21T23:17:11.154801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875631003177175:2145];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:11.166856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001274/r3tmp/tmpVDd7aP/pdisk_1.dat 2024-11-21T23:17:12.218600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:12.236103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:12.236223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:12.251836Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:12.252483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12619 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T23:17:12.572170Z node 1 :TX_PROXY DEBUG: actor# [1:7439875631003177301:2138] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:12.572220Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875635298145043:2442] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:12.572347Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875631003177323:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:12.572446Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875635298144933:2352][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7439875631003177323:2151], cookie# 1 2024-11-21T23:17:12.574089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875635298144938:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875635298144935:2352], cookie# 1 2024-11-21T23:17:12.574119Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875635298144939:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875635298144936:2352], cookie# 1 2024-11-21T23:17:12.574134Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875635298144940:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875635298144937:2352], cookie# 1 2024-11-21T23:17:12.574173Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875626708209678:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875635298144940:2352], cookie# 1 2024-11-21T23:17:12.574216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875635298144940:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875626708209678:2057], cookie# 1 2024-11-21T23:17:12.574241Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875635298144933:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875635298144937:2352], cookie# 1 2024-11-21T23:17:12.574271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875635298144933:2352][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:12.574288Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875626708209672:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875635298144938:2352], cookie# 1 2024-11-21T23:17:12.574306Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875626708209675:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875635298144939:2352], cookie# 1 2024-11-21T23:17:12.574344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875635298144938:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875626708209672:2051], cookie# 1 2024-11-21T23:17:12.574357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875635298144939:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875626708209675:2054], cookie# 1 2024-11-21T23:17:12.574374Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875635298144933:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7439875635298144935:2352], cookie# 1 2024-11-21T23:17:12.574420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875635298144933:2352][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:12.574463Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875635298144933:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875635298144936:2352], cookie# 1 2024-11-21T23:17:12.574477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875635298144933:2352][/dc-1] Unexpected sync response: sender# [1:7439875635298144936:2352], cookie# 1 2024-11-21T23:17:12.574543Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875631003177323:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2024-11-21T23:17:12.581535Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875631003177323:2151], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7439875635298144933:2352] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T23:17:12.581677Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875631003177323:2151], cacheItem# { Subscriber: { Subscriber: [1:7439875635298144933:2352] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T23:17:12.588883Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875635298145044:2443], recipient# [1:7439875635298145043:2442], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:17:12.588968Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875635298145043:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T23:17:12.644694Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875635298145043:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2024-11-21T23:17:12.647623Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875635298145043:2442] Handle TEvDescribeSchemeResult Forward to# [1:7439875635298145042:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData 
size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2024-11-21T23:17:12.697232Z node 1 :TX_PROXY DEBUG: actor# [1:7439875631003177301:2138] Handle TEvProposeTransaction 2024-11-21T23:17:12.697267Z node 1 :TX_PROXY DEBUG: actor# [1:7439875631003177301:2138] TxId# 281474976710657 ProcessProposeTransaction 2024-11-21T23:17:12.697398Z node 1 :TX_PROXY DEBUG: actor# [1:7439875631003177301:2138] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7439875635298145049:2447] 2024-11-21T23:17:12.812004Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875635298145049:2447] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2024-11-21T23:17:12.812170Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875635298145049:2447] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2024-11-21T23:17:12.812299Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875631003177323:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:23.282670Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439875682756721867:3015][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439875665576851154:2053] 2024-11-21T23:17:23.282689Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439875682756721868:3016][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7439875665576851157:2056] 2024-11-21T23:17:23.282689Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][4:7439875682756721869:3015][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439875665576851157:2056] 2024-11-21T23:17:23.282713Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439875682756721857:3016][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7439875682756721863:3016] 2024-11-21T23:17:23.282725Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439875682756721856:3015][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439875682756721858:3015] 2024-11-21T23:17:23.282736Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7439875682756721857:3016][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7439875665576851496:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:23.282756Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439875682756721856:3015][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439875682756721860:3015] 2024-11-21T23:17:23.282759Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439875665576851151:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439875682756721865:3016] 2024-11-21T23:17:23.282773Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439875665576851151:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439875682756721864:3015] 2024-11-21T23:17:23.282778Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7439875682756721856:3015][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [4:7439875665576851496:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:23.282788Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439875665576851154:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439875682756721866:3016] 2024-11-21T23:17:23.282798Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7439875682756721856:3015][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7439875682756721862:3015] 2024-11-21T23:17:23.282799Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439875665576851154:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439875682756721867:3015] 2024-11-21T23:17:23.282816Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7439875682756721856:3015][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7439875665576851496:2137], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:23.282837Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439875665576851157:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439875682756721869:3015] 2024-11-21T23:17:23.282840Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7439875665576851496:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T23:17:23.282850Z node 4 :SCHEME_BOARD_REPLICA DEBUG: [4:7439875665576851157:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [4:7439875682756721868:3016] 2024-11-21T23:17:23.282911Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7439875665576851496:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7439875682756721857:3016] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T23:17:23.282996Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875665576851496:2137], cacheItem# { Subscriber: { Subscriber: [4:7439875682756721857:3016] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: 
OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:23.283044Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7439875665576851496:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2024-11-21T23:17:23.283097Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7439875665576851496:2137], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7439875682756721856:3015] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T23:17:23.283148Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875665576851496:2137], cacheItem# { Subscriber: { Subscriber: [4:7439875682756721856:3015] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:23.283246Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439875682756721870:3017], recipient# [4:7439875682756721855:2303], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:23.530909Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439875665576851496:2137], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:23.531058Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875665576851496:2137], cacheItem# { Subscriber: { Subscriber: [4:7439875669871819362:2543] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:23.531158Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439875682756721875:3018], recipient# [4:7439875682756721874:2304], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:24.281629Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439875665576851496:2137], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:24.281797Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875665576851496:2137], cacheItem# { Subscriber: { Subscriber: [4:7439875682756721843:3013] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:24.281933Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439875687051689179:3022], recipient# [4:7439875687051689178:2305], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] Test command err: 2024-11-21T23:15:28.646846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875186716732749:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.657017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000efc/r3tmp/tmpkmoS2X/pdisk_1.dat 2024-11-21T23:15:28.979094Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:29.202818Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:29.202922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.212156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:29.264893Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15963, node 1 2024-11-21T23:15:29.466060Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/000efc/r3tmp/yandexhBzmVm.tmp 2024-11-21T23:15:29.466088Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/000efc/r3tmp/yandexhBzmVm.tmp 2024-11-21T23:15:29.469288Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/000efc/r3tmp/yandexhBzmVm.tmp 2024-11-21T23:15:29.469533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.540552Z INFO: TTestServer started on Port 28881 GrpcPort 15963 TClient is connected to server localhost:28881 PQClient connected to localhost:15963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:30.044198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:30.091298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:30.297457Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:30.314253Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:15:32.431907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875203896602714:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.432025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875203896602734:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.432081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.443828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:15:32.467366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875203896602743:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:32.823255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.873713Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875203896602817:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:32.875130Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzQwMzZmMmItYWQwNjIwOC01Y2Q3ZmNhYi03NjIxYjMxYQ==, ActorId: [1:7439875203896602711:2307], ActorState: ExecuteState, TraceId: 01jd8g4sxyc02npg3zvvete3pq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:32.877079Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:32.896993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.990065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439875208191570384:2622] 2024-11-21T23:15:33.646958Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875186716732749:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.647059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T23:15:39.052050Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T23:15:39.096527Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:15:39.097707Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439875233961374420:2769], Recipient [1:7439875191011700445:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:39.097734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:39.097751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T23:15:39.097789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439875233961374416:2766], Recipient [1:7439875191011700445:2188]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2024-11-21T23:15:39.097803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:15:39.293771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:15:39.294311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:15:39.306980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T23:15:39.307043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T23:15:39.307079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T23:15:39.307117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T23:15:39.307167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2024-11-21T23:15:39.307325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:15:39. ... .597354Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPersQueue::TEvStatus 2024-11-21T23:17:23.597365Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Handle TEvPersQueue::TEvStatus 2024-11-21T23:17:23.597410Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188536, Sender [5:7439875648252330845:2431], Recipient [5:7439875678317103162:2749]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:23.597420Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:23.597462Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271187975, Sender [5:7439875648252330845:2431], Recipient [5:7439875678317103164:2750]: NKikimrPQ.TStatus GetStatForAllConsumers: true 2024-11-21T23:17:23.597476Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPersQueue::TEvStatus 2024-11-21T23:17:23.597486Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Handle TEvPersQueue::TEvStatus 2024-11-21T23:17:23.597531Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188536, Sender [5:7439875648252330845:2431], Recipient [5:7439875678317103164:2750]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:23.597543Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:23.597597Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7439875648252330844:2430], Partition 0, Sender [5:7439875648252330844:2430], Recipient [5:7439875648252330935:2437], Cookie: 0 2024-11-21T23:17:23.597635Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7439875648252330844:2430], Recipient [5:7439875648252330935:2437]: NKikimr::TEvPQ::TEvPartitionStatus 2024-11-21T23:17:23.597652Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2024-11-21T23:17:23.597872Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 4 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2024-11-21T23:17:23.598001Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7439875648252330844:2430], Partition 0, Sender [5:7439875648252330844:2430], Recipient [5:7439875648252330935:2437], Cookie: 0 2024-11-21T23:17:23.598045Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender 
[5:7439875648252330844:2430], Recipient [5:7439875648252330935:2437]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:23.598062Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:23.598105Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7439875648252330836:2429], Partition 1, Sender [5:7439875648252330836:2429], Recipient [5:7439875648252330934:2436], Cookie: 0 2024-11-21T23:17:23.598139Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7439875648252330836:2429], Recipient [5:7439875648252330934:2436]: NKikimr::TEvPQ::TEvPartitionStatus 2024-11-21T23:17:23.598151Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2024-11-21T23:17:23.598297Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 4 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2024-11-21T23:17:23.598407Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7439875648252330836:2429], Partition 1, Sender [5:7439875648252330836:2429], Recipient [5:7439875648252330934:2436], Cookie: 0 2024-11-21T23:17:23.598449Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7439875648252330836:2429], Recipient [5:7439875648252330934:2436]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:23.598462Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:23.598502Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7439875678317103162:2749], Partition 2, Sender [5:7439875678317103162:2749], Recipient [5:7439875678317103256:2761], Cookie: 0 2024-11-21T23:17:23.598535Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7439875678317103162:2749], Recipient [5:7439875678317103256:2761]: NKikimr::TEvPQ::TEvPartitionStatus 2024-11-21T23:17:23.598548Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2024-11-21T23:17:23.598693Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 4 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } 
ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2024-11-21T23:17:23.598785Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7439875678317103162:2749], Partition 2, Sender [5:7439875678317103162:2749], Recipient [5:7439875678317103256:2761], Cookie: 0 2024-11-21T23:17:23.598822Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7439875678317103162:2749], Recipient [5:7439875678317103256:2761]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:23.598832Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:23.598877Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7439875678317103164:2750], Partition 3, Sender [5:7439875678317103164:2750], Recipient [5:7439875678317103247:2758], Cookie: 0 2024-11-21T23:17:23.598911Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7439875678317103164:2750], Recipient [5:7439875678317103247:2758]: NKikimr::TEvPQ::TEvPartitionStatus 2024-11-21T23:17:23.598924Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2024-11-21T23:17:23.599063Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 3, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 4 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2024-11-21T23:17:23.599154Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7439875678317103164:2750], Partition 3, Sender [5:7439875678317103164:2750], Recipient [5:7439875678317103247:2758], Cookie: 0 2024-11-21T23:17:23.599189Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7439875678317103164:2750], Recipient [5:7439875678317103247:2758]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2024-11-21T23:17:23.599201Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2024-11-21T23:17:23.599266Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7439875648252330935:2437], Recipient [5:7439875648252330844:2430]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:23.599288Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:23.599343Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7439875648252330934:2436], Recipient [5:7439875648252330836:2429]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:23.599355Z node 5 :PERSQUEUE TRACE: HandleHook, processing event 
TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:23.599396Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7439875678317103256:2761], Recipient [5:7439875678317103162:2749]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:23.599407Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:23.599548Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7439875678317103247:2758], Recipient [5:7439875678317103164:2750]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:23.599559Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2024-11-21T23:17:23.600044Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 8 DataSize: 0 UsedReserveSize: 0 2024-11-21T23:17:23.600144Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][test-topic] ProcessPendingStats. PendingUpdates size 4 2024-11-21T23:17:23.600360Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [5:7439875648252330845:2431], Recipient [5:7439875596712722158:2130]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 8 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2024-11-21T23:17:23.600385Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2024-11-21T23:17:23.600404Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2024-11-21T23:17:23.600428Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099995s, queue# 1 >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> Viewer::JsonStorageListingV2 [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter >> TStorageTenantTest::GenericCases >> TStorageTenantTest::DeclareAndDefine >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD] Test command err: 2024-11-21T23:15:32.530766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:32.538409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:32.538653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c36/r3tmp/tmpCOsAx3/pdisk_1.dat 2024-11-21T23:15:33.025742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.091710Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:33.152604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:33.152764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:33.166780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:33.297376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.340761Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:33.342070Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:33.346172Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:33.346563Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:33.401790Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:33.402651Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:33.402777Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:33.404615Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:33.404699Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:33.404782Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:33.405195Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:33.428921Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:33.429129Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:33.429228Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:33.429264Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:33.429296Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:33.429328Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:33.429763Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.429839Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.430475Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:33.430594Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:33.430755Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.430803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.430857Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:33.430935Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:33.431011Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:33.431090Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:33.431139Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:33.431178Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:33.431213Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:33.431270Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:33.431466Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:33.431524Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:33.431674Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:33.431950Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:33.432014Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:33.432153Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:33.432217Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:33.432256Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:33.432312Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:33.432358Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:33.432592Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:33.432636Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:33.432675Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:33.432704Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:33.432756Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:33.432782Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:33.432818Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:33.432873Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:33.432899Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:33.434421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:33.434486Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:33.445252Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:33.445337Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:33.445417Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:33.445485Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:33.445564Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:33.648211Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.648284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.648331Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:33.648465Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:33.648503Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:33.648643Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:33.648703Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:33.648768Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:33.648810Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:33.661249Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:33.661363Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:33.662006Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.662058Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.662121Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:33.662172Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:33.662212Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:33.662267Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:17:25.525416Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:25.525450Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:25.525492Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:17:25.525526Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:25.525559Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:25.525596Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T23:17:25.525626Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2024-11-21T23:17:25.525660Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.525692Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2024-11-21T23:17:25.525721Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T23:17:25.525751Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2024-11-21T23:17:25.525859Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T23:17:25.525897Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.525922Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T23:17:25.525948Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:25.525972Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit 
BuildAndWaitDependencies 2024-11-21T23:17:25.526009Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2024-11-21T23:17:25.526042Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2024-11-21T23:17:25.526074Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2024-11-21T23:17:25.526112Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.526137Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:25.526160Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T23:17:25.526184Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T23:17:25.526279Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T23:17:25.526307Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T23:17:25.526345Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T23:17:25.526381Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T23:17:25.526425Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.526451Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T23:17:25.526477Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T23:17:25.526509Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:25.526636Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2024-11-21T23:17:25.526665Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T23:17:25.526696Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:17:25.526731Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:17:25.526759Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.526783Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:17:25.526807Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2024-11-21T23:17:25.526841Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:25.526871Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:25.526902Z node 
14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T23:17:25.526933Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T23:17:25.537995Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715666} 2024-11-21T23:17:25.538123Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T23:17:25.538256Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:25.538335Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:17:25.538474Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:25.538565Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:25.539007Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T23:17:25.539050Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T23:17:25.539091Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:17:25.539120Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:25.539163Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:25.539204Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:25.540914Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T23:17:25.541076Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:17:25.541187Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T23:17:25.541349Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:25.541440Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:17:25.541508Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:25.541566Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:25.541619Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T23:17:25.541683Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:25.541712Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:25.541734Z node 14 :TX_DATASHARD 
TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:17:25.541756Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:25.541909Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2024-11-21T23:17:25.542352Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T23:17:25.542454Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2024-11-21T23:17:25.542549Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 3} after executionsCount# 1 2024-11-21T23:17:25.542634Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:25.542908Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 3} finished in read 2024-11-21T23:17:25.543025Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:25.543059Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:25.543088Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:25.543119Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:25.543179Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:25.543204Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:25.543238Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T23:17:25.543307Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:17:25.543521Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TStorageTenantTest::CreateTableInsideSubDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2024-11-21T23:15:33.536466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:33.542864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:33.543101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002b3a/r3tmp/tmpHZdGMq/pdisk_1.dat 2024-11-21T23:15:33.941582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.990614Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:34.044564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:34.044700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:34.056451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:34.173738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:34.216139Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:34.217445Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:34.217943Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:34.218235Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:34.271400Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:34.272232Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:34.272368Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:34.274335Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:34.274562Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:34.274629Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:34.275046Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:34.307055Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:34.307298Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:34.307437Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:34.307499Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:34.307545Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:34.307584Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.308050Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.308125Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.308665Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:34.308773Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:34.308887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.308920Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.308965Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:34.309048Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.309180Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:34.309242Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.309341Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:34.309376Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:34.309408Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:34.309450Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:34.309574Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:34.309614Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:34.309749Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:34.310006Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:34.310143Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:34.310262Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:34.310324Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:34.310367Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:34.310541Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:34.310590Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.310857Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:34.310898Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:34.310938Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:34.310992Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.311065Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:34.311094Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:34.311130Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:34.311164Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.311188Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:34.313012Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:34.313069Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:34.324180Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:34.324272Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.324314Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.324387Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:34.324467Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:34.519502Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.519580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.519624Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:34.519745Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:34.519788Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:34.519924Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.519969Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:34.520033Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:34.520081Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:34.525096Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:34.525190Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.525799Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.525845Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.525905Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.525953Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:34.525997Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.526056Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:17:25.798862Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:842:2676], Recipient [14:842:2676]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:25.798903Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:25.798953Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:17:25.798990Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:25.799025Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:25.799070Z node 14 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2024-11-21T23:17:25.799109Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2024-11-21T23:17:25.799145Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.799178Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2024-11-21T23:17:25.799216Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T23:17:25.799252Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2024-11-21T23:17:25.799411Z node 14 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2024-11-21T23:17:25.799475Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.799508Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T23:17:25.799542Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:25.799578Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit 
BuildAndWaitDependencies 2024-11-21T23:17:25.799625Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2024-11-21T23:17:25.799666Z node 14 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2024-11-21T23:17:25.799707Z node 14 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2024-11-21T23:17:25.799752Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.799779Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:25.799807Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T23:17:25.799835Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T23:17:25.799943Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T23:17:25.799972Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T23:17:25.800018Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T23:17:25.800055Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T23:17:25.800086Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.800115Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T23:17:25.800143Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T23:17:25.800175Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:25.800346Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2024-11-21T23:17:25.800387Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T23:17:25.800428Z node 14 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:17:25.800470Z node 14 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:17:25.800508Z node 14 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2024-11-21T23:17:25.800535Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:17:25.800567Z node 14 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 2024-11-21T23:17:25.800603Z node 14 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:25.800640Z node 14 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:25.800682Z node 
14 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T23:17:25.800744Z node 14 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T23:17:25.812175Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 281474976715666} 2024-11-21T23:17:25.812331Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T23:17:25.812480Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:25.812577Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:17:25.812694Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:25.812806Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:25.813006Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3500 txid# 281474976715666} 2024-11-21T23:17:25.813045Z node 14 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2024-11-21T23:17:25.813096Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:17:25.813128Z node 14 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:25.813175Z node 14 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [14:1035:2832], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:25.813220Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:25.815494Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [14:557:2484], Recipient [14:630:2536]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T23:17:25.815699Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:17:25.815830Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2024-11-21T23:17:25.815981Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:25.816066Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:17:25.816142Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:25.816212Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:25.816267Z node 14 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2024-11-21T23:17:25.816338Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:25.816378Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:25.816408Z node 14 :TX_DATASHARD 
TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:17:25.816441Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:25.816614Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2024-11-21T23:17:25.817129Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T23:17:25.817217Z node 14 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2024-11-21T23:17:25.817303Z node 14 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[14:557:2484], 10} after executionsCount# 1 2024-11-21T23:17:25.817430Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:25.817729Z node 14 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[14:557:2484], 10} finished in read 2024-11-21T23:17:25.817853Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:25.817888Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:25.817923Z node 14 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:25.817956Z node 14 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:25.818023Z node 14 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:25.818051Z node 14 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:25.818088Z node 14 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T23:17:25.818158Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:17:25.818407Z node 14 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TStorageTenantTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2024-11-21T23:17:22.162515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875675346034466:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:22.166058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023e7/r3tmp/tmpXKK3h6/pdisk_1.dat 2024-11-21T23:17:22.552396Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:22.574489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:22.577453Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:22.593256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30491 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T23:17:22.744212Z node 1 :TX_PROXY DEBUG: actor# [1:7439875675346034552:2138] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:22.744287Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875675346034979:2433] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:22.744431Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875675346034621:2165], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:22.744479Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875675346034621:2165], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:22.744765Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:22.746850Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066924:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875675346034984:2434] 2024-11-21T23:17:22.746915Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066924:2051] Subscribe: subscriber# [1:7439875675346034984:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:22.746996Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066927:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875675346034985:2434] 2024-11-21T23:17:22.747016Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066927:2054] Subscribe: subscriber# [1:7439875675346034985:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:22.747042Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066930:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875675346034986:2434] 2024-11-21T23:17:22.747057Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066930:2057] Subscribe: subscriber# [1:7439875675346034986:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:22.747150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034984:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875671051066924:2051] 2024-11-21T23:17:22.747188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034985:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875671051066927:2054] 2024-11-21T23:17:22.747215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034986:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875671051066930:2057] 2024-11-21T23:17:22.747252Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439875675346034980:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875675346034981:2434] 2024-11-21T23:17:22.747281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875675346034982:2434] 2024-11-21T23:17:22.747335Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875675346034980:2434][/dc-1] Set up state: owner# [1:7439875675346034621:2165], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:22.747438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875675346034983:2434] 2024-11-21T23:17:22.747506Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875675346034980:2434][/dc-1] Path was already updated: owner# [1:7439875675346034621:2165], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:22.747544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034984:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875675346034981:2434], cookie# 1 2024-11-21T23:17:22.747559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034985:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875675346034982:2434], cookie# 1 2024-11-21T23:17:22.747571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034986:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875675346034983:2434], cookie# 1 2024-11-21T23:17:22.747619Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066924:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875675346034984:2434] 2024-11-21T23:17:22.747647Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066924:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875675346034984:2434], cookie# 1 2024-11-21T23:17:22.747667Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066927:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875675346034985:2434] 2024-11-21T23:17:22.747682Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066927:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875675346034985:2434], cookie# 1 2024-11-21T23:17:22.747695Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066930:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875675346034986:2434] 2024-11-21T23:17:22.747714Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066930:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875675346034986:2434], cookie# 1 
2024-11-21T23:17:22.748574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034984:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875671051066924:2051], cookie# 1 2024-11-21T23:17:22.748615Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034985:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875671051066927:2054], cookie# 1 2024-11-21T23:17:22.748656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875675346034986:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875671051066930:2057], cookie# 1 2024-11-21T23:17:22.748693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875675346034981:2434], cookie# 1 2024-11-21T23:17:22.748733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:22.748749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875675346034982:2434], cookie# 1 2024-11-21T23:17:22.748771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:22.748799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875675346034983:2434], cookie# 1 2024-11-21T23:17:22.748818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875675346034980:2434][/dc-1] Unexpected sync response: sender# [1:7439875675346034983:2434], cookie# 1 2024-11-21T23:17:22.811486Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875675346034621:2165], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:22.811941Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875675346034621:2165], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: 
[OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... ng: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:25.501761Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066927:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875688230937485:2883] 2024-11-21T23:17:25.501769Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066924:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875688230937484:2883] 2024-11-21T23:17:25.501779Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066930:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875688230937486:2883] 2024-11-21T23:17:25.501797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875688230937474:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:25.501911Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875675346034621:2165], cacheItem# { Subscriber: { Subscriber: [1:7439875688230937472:2882] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:25.501951Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875675346034621:2165], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T23:17:25.502001Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875675346034621:2165], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7439875688230937473:2883] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T23:17:25.502493Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066924:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7439875688230937490:2884] 2024-11-21T23:17:25.502521Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066924:2051] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 
2024-11-21T23:17:25.502973Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875675346034621:2165], cacheItem# { Subscriber: { Subscriber: [1:7439875688230937473:2883] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:25.503039Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066927:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7439875688230937491:2884] 2024-11-21T23:17:25.503050Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066927:2054] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2024-11-21T23:17:25.503100Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066927:2054] Subscribe: subscriber# [1:7439875688230937491:2884], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:25.503130Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066930:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7439875688230937492:2884] 2024-11-21T23:17:25.503138Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066930:2057] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2024-11-21T23:17:25.504530Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066924:2051] Subscribe: subscriber# [1:7439875688230937490:2884], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:25.504632Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875688230937493:2885], recipient# [1:7439875688230937469:2286], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:25.504682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875688230937491:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875671051066927:2054] 2024-11-21T23:17:25.504707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439875688230937490:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875671051066924:2051] 2024-11-21T23:17:25.504745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875688230937474:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875688230937488:2884] 2024-11-21T23:17:25.504781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875688230937474:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875688230937487:2884] 2024-11-21T23:17:25.505400Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875671051066930:2057] Subscribe: subscriber# [1:7439875688230937492:2884], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:25.505478Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066927:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875688230937491:2884] 2024-11-21T23:17:25.505495Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066924:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875688230937490:2884] 2024-11-21T23:17:25.506482Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875688230937474:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7439875675346034621:2165], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:25.506540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875688230937492:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875671051066930:2057] 2024-11-21T23:17:25.506575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875688230937474:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875688230937489:2884] 2024-11-21T23:17:25.506628Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875688230937474:2884][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7439875675346034621:2165], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:25.506702Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875675346034621:2165], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2024-11-21T23:17:25.506778Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875671051066930:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875688230937492:2884] 2024-11-21T23:17:25.506787Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875675346034621:2165], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7439875688230937474:2884] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T23:17:25.506901Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875675346034621:2165], cacheItem# { Subscriber: { Subscriber: [1:7439875688230937474:2884] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:25.506992Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875688230937494:2886], recipient# [1:7439875688230937471:2288], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive >> Viewer::TabletMerging [FAIL] >> Viewer::TabletMergingPacked >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |86.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc [GOOD] >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore >> TStorageTenantTest::LsLs >> KqpFlipJoin::RightSemi_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2024-11-21T23:17:23.809999Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875681070805365:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:23.810108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023da/r3tmp/tmpev8oL1/pdisk_1.dat 2024-11-21T23:17:24.239030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:24.239192Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:24.243282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:24.251039Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61692 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T23:17:24.500890Z node 1 :TX_PROXY DEBUG: actor# [1:7439875681070805588:2137] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:24.500941Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875685365773311:2433] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:24.501060Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875681070805685:2188], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:24.501088Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875681070805685:2188], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:24.501301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:24.503446Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805257:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875685365773316:2434] 2024-11-21T23:17:24.503535Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875681070805257:2051] Subscribe: subscriber# [1:7439875685365773316:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:24.503568Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805260:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875685365773317:2434] 2024-11-21T23:17:24.503608Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805263:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875685365773318:2434] 2024-11-21T23:17:24.503628Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875681070805263:2057] Subscribe: subscriber# [1:7439875685365773318:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:24.503648Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875681070805260:2054] Subscribe: subscriber# [1:7439875685365773317:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:24.503681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773316:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875681070805257:2051] 2024-11-21T23:17:24.503749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773318:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875681070805263:2057] 2024-11-21T23:17:24.503769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773317:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875681070805260:2054] 
2024-11-21T23:17:24.503774Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805257:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875685365773316:2434] 2024-11-21T23:17:24.503793Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805263:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875685365773318:2434] 2024-11-21T23:17:24.503809Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805260:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875685365773317:2434] 2024-11-21T23:17:24.503820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875685365773313:2434] 2024-11-21T23:17:24.503855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875685365773315:2434] 2024-11-21T23:17:24.503910Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875685365773312:2434][/dc-1] Set up state: owner# [1:7439875681070805685:2188], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:24.504044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875685365773314:2434] 2024-11-21T23:17:24.504125Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875685365773312:2434][/dc-1] Path was already updated: owner# [1:7439875681070805685:2188], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:24.504174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773316:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875685365773313:2434], cookie# 1 2024-11-21T23:17:24.504279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773317:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875685365773314:2434], cookie# 1 2024-11-21T23:17:24.504295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773318:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875685365773315:2434], cookie# 1 2024-11-21T23:17:24.504337Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805257:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875685365773316:2434], cookie# 1 2024-11-21T23:17:24.504361Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805260:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875685365773317:2434], cookie# 1 2024-11-21T23:17:24.504416Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805263:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: 
sender# [1:7439875685365773318:2434], cookie# 1 2024-11-21T23:17:24.504487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773316:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875681070805257:2051], cookie# 1 2024-11-21T23:17:24.504504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773317:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875681070805260:2054], cookie# 1 2024-11-21T23:17:24.504514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875685365773318:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875681070805263:2057], cookie# 1 2024-11-21T23:17:24.504556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875685365773313:2434], cookie# 1 2024-11-21T23:17:24.504589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:24.504604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875685365773314:2434], cookie# 1 2024-11-21T23:17:24.504635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:24.504657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875685365773315:2434], cookie# 1 2024-11-21T23:17:24.504669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875685365773312:2434][/dc-1] Unexpected sync response: sender# [1:7439875685365773315:2434], cookie# 1 2024-11-21T23:17:24.564980Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875681070805685:2188], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:24.565385Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875681070805685:2188], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... hemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [1:7439875689660740871:2630] 2024-11-21T23:17:25.971805Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805263:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [1:7439875689660740873:2630] 2024-11-21T23:17:25.971820Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875681070805685:2188], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231044831 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:25.972058Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875681070805685:2188], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231044831 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 
PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [1:7439875685365773312:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732231044831 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [1:7439875685365773312:2434] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732231044831 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2024-11-21T23:17:25.972172Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875681070805685:2188], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 } 2024-11-21T23:17:25.972496Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875681070805685:2188], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7439875689660740867:2630] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732231044978 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [1:7439875689660740867:2630] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1732231044978 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2024-11-21T23:17:25.972568Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805260:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7439875690073656980:2223] 2024-11-21T23:17:25.972590Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805257:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7439875690073656979:2223] 2024-11-21T23:17:25.972607Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875681070805263:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7439875690073656981:2223] 2024-11-21T23:17:25.975387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2024-11-21T23:17:25.975410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2024-11-21T23:17:25.975433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 
at ss 72057594046644480 2024-11-21T23:17:25.975452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2024-11-21T23:17:25.975556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2024-11-21T23:17:25.975581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2024-11-21T23:17:25.983370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2024-11-21T23:17:25.983709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T23:17:25.984019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T23:17:25.984156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T23:17:25.984242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:25.984389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T23:17:25.984508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T23:17:25.984601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T23:17:25.984743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T23:17:25.984780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T23:17:25.984894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T23:17:25.985016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T23:17:25.985037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T23:17:25.985076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:17:25.988937Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2024-11-21T23:17:25.989027Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2024-11-21T23:17:25.989097Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status 
was NO_GROUP for TabletId 72075186224037889 2024-11-21T23:17:25.994214Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 2024-11-21T23:17:26.002532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2024-11-21T23:17:26.002561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2024-11-21T23:17:26.002618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T23:17:26.002632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2024-11-21T23:17:26.002681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2024-11-21T23:17:26.002708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2024-11-21T23:17:26.002761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T23:17:26.002784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2024-11-21T23:17:26.002837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T23:17:26.002885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T23:17:26.009389Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T23:17:26.010107Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2024-11-21T23:17:26.194233Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2024-11-21T23:15:28.352057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875189087436385:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.352099Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f02/r3tmp/tmpiWSe3q/pdisk_1.dat 2024-11-21T23:15:28.712965Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:15:29.080365Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:29.081776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:29.084270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.090969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13396, node 1 2024-11-21T23:15:29.265182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: /home/runner/.ya/build/build_root/dl5p/000f02/r3tmp/yandexAlSvUQ.tmp 2024-11-21T23:15:29.265210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/000f02/r3tmp/yandexAlSvUQ.tmp 2024-11-21T23:15:29.265389Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/000f02/r3tmp/yandexAlSvUQ.tmp 2024-11-21T23:15:29.265494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.373694Z INFO: TTestServer started on Port 7094 GrpcPort 13396 TClient is connected to server localhost:7094 PQClient connected to localhost:13396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:29.764511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:29.805840Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:29.826841Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:15:29.836317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:30.000945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:30.029518Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:15:32.227561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875206267306372:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.227716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875206267306364:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.227928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.236905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:15:32.298817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875206267306378:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:32.599726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.620768Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875206267306477:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:32.624328Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTIzYTEzYmUtZGMwNDljY2YtN2QxYjUxNGYtOWJlNDZjZjk=, ActorId: [1:7439875206267306361:2307], ActorState: ExecuteState, TraceId: 01jd8g4sqg6238hxwe9n8h8x7f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:32.632046Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:32.643632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:32.768675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7439875210562274033:2626] 2024-11-21T23:15:33.353561Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875189087436385:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.353632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2024-11-21T23:15:38.641090Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T23:15:38.681575Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:15:38.682934Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439875232037110768:2769], Recipient [1:7439875189087436810:2194]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:38.682960Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:38.682985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T23:15:38.683029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439875232037110764:2766], Recipient [1:7439875189087436810:2194]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2024-11-21T23:15:38.683052Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:15:38.855048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:15:38.855555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:15:38.855901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T23:15:38.856064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T23:15:38.856092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T23:15:38.856143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T23:15:38.856194Z node 1 :FLAT_ ... 17:28.876581Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.876628Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.876693Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875686118795280:2970], Partition 4, Sender [0:0:0], Recipient [5:7439875686118795383:2984], Cookie: 0 2024-11-21T23:17:28.876726Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875686118795383:2984]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.876740Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.876763Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.876796Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.876827Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.876845Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.876884Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875686118795276:2969], Partition 3, Sender [0:0:0], Recipient [5:7439875686118795397:2991], Cookie: 0 2024-11-21T23:17:28.876914Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875686118795397:2991]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.876926Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.876947Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.876976Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.876990Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.877005Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.877043Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875664643958186:2826], Partition 1, Sender [0:0:0], Recipient [5:7439875664643958267:2835], Cookie: 0 2024-11-21T23:17:28.877072Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875664643958267:2835]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.877085Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.877105Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.877145Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.877160Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.877175Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.877219Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875664643958185:2825], Partition 2, Sender [0:0:0], Recipient [5:7439875664643958264:2833], Cookie: 0 2024-11-21T23:17:28.877247Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875664643958264:2833]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.877259Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.877278Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.877304Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.877318Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.877333Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.924213Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7439875552974805841:2135]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2024-11-21T23:17:28.924287Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2024-11-21T23:17:28.924307Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2024-11-21T23:17:28.924323Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2024-11-21T23:17:28.924403Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2024-11-21T23:17:28.925166Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7439875552974805841:2135]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2024-11-21T23:17:28.925211Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2024-11-21T23:17:28.925233Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2024-11-21T23:17:28.976589Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875613104349122:2443], Partition 0, Sender [0:0:0], Recipient [5:7439875613104349182:2446], Cookie: 0 2024-11-21T23:17:28.976668Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875613104349182:2446]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.976692Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.976734Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.976808Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.976838Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.976866Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.977064Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875686118795280:2970], Partition 4, Sender [0:0:0], Recipient [5:7439875686118795383:2984], Cookie: 0 2024-11-21T23:17:28.977108Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875686118795383:2984]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.977131Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.977154Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.977194Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.977216Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.977231Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.977275Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875686118795276:2969], Partition 3, Sender [0:0:0], Recipient [5:7439875686118795397:2991], Cookie: 0 2024-11-21T23:17:28.977313Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875686118795397:2991]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.977330Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.977351Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.977379Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.977399Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.977412Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.979982Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875664643958186:2826], Partition 1, Sender [0:0:0], Recipient [5:7439875664643958267:2835], Cookie: 0 2024-11-21T23:17:28.980044Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875664643958267:2835]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.980061Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.980095Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.980162Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.980184Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.980208Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2024-11-21T23:17:28.980271Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7439875664643958185:2825], Partition 2, Sender [0:0:0], Recipient [5:7439875664643958264:2833], Cookie: 0 2024-11-21T23:17:28.980309Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7439875664643958264:2833]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.980323Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2024-11-21T23:17:28.980345Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2024-11-21T23:17:28.980378Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2024-11-21T23:17:28.980394Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2024-11-21T23:17:28.980412Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc [GOOD] Test command err: 2024-11-21T23:15:33.469113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:33.473846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:33.474046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002b18/r3tmp/tmpHxlwQB/pdisk_1.dat 2024-11-21T23:15:33.879461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.954683Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:34.010880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:34.011022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:34.022577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:34.147058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:34.184947Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:34.185798Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:34.186181Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:34.186382Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:34.218224Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:34.218991Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:34.219094Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:34.220871Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:34.220943Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:34.221001Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:34.221249Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:34.253617Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:34.253846Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:34.253966Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:34.254018Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:34.254060Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:34.254118Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.254635Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.254707Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.255224Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:34.255332Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:34.255441Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.255511Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.255556Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:34.255612Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.255668Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:34.255723Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.255763Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:34.255794Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:34.255829Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:34.255894Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:34.256047Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:34.256089Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:34.256205Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:34.256461Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:34.256510Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:34.256610Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:34.256685Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:34.256723Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:34.256757Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:34.256813Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.257075Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:34.257110Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:34.257144Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:34.257178Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.257261Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:34.257295Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:34.257327Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:34.257381Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.257410Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:34.259029Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:34.259087Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:34.270985Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:34.271086Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.271182Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.271233Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:34.271304Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:34.467978Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.468038Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.468077Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:34.468197Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:34.468227Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:34.468349Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.468440Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:34.468485Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:34.468518Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:34.472790Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:34.472876Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.473626Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.473675Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.473725Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.473769Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:34.473804Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.473858Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 2024-11-21T23:17:29.822634Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:29.822696Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:29.822761Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2024-11-21T23:17:29.822785Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:29.822824Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2024-11-21T23:17:29.822894Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:17:29.822989Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2024-11-21T23:17:29.823300Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:632:2537], Recipient [15:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:29.823334Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:29.823377Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:29.823410Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2024-11-21T23:17:29.823438Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2024-11-21T23:17:29.823480Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:29.823589Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T23:17:29.823951Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T23:17:29.823987Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 3} after executionsCount# 2 
2024-11-21T23:17:29.824026Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:29.824155Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 3} finished in read 2024-11-21T23:17:29.824224Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T23:17:29.824251Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:29.824277Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:29.824305Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:29.824347Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2024-11-21T23:17:29.824369Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:29.824392Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2024-11-21T23:17:29.824427Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:29.824478Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:17:29.824558Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:17:29.824628Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:17:29.825194Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=MjNkMDBhMDUtOTUzMmJlZjktMmM2ZjVmZDYtZGRjNWZiODI=, workerId: [15:1120:2893], local sessions count: 0 2024-11-21T23:17:29.826609Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T23:17:29.826738Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:17:29.826870Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2024-11-21T23:17:29.827026Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T23:17:29.827098Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:17:29.827165Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:29.827226Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:29.827288Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2024-11-21T23:17:29.827354Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T23:17:29.827382Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2024-11-21T23:17:29.827407Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:17:29.827433Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:29.827581Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T23:17:29.827969Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2024-11-21T23:17:29.828048Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 4} after executionsCount# 1 2024-11-21T23:17:29.828129Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:29.828345Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 4} finished in read 2024-11-21T23:17:29.828432Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T23:17:29.828460Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:29.828483Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:29.828509Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:29.828562Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2024-11-21T23:17:29.828587Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:29.828636Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2024-11-21T23:17:29.828693Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:17:29.829672Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2024-11-21T23:17:29.829805Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:17:29.829914Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2024-11-21T23:17:29.830050Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T23:17:29.830115Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:17:29.830173Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:29.830230Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:29.830304Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
2024-11-21T23:17:29.830362Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T23:17:29.830411Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:29.830437Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:17:29.830466Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:29.830599Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2024-11-21T23:17:29.830975Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2024-11-21T23:17:29.831049Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:558:2485], 5} after executionsCount# 1 2024-11-21T23:17:29.831139Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:17:29.831377Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:558:2485], 5} finished in read 2024-11-21T23:17:29.831498Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T23:17:29.831531Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:29.831559Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:29.831591Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:29.831641Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2024-11-21T23:17:29.831666Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:29.831698Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2024-11-21T23:17:29.831761Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> TestKinesisHttpProxy::TestPing >> TestKinesisHttpProxy::CreateDeleteStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2024-11-21T23:17:13.876531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:13.885454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:13.885682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002bb3/r3tmp/tmpSdrxF4/pdisk_1.dat 2024-11-21T23:17:14.336050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:17:14.375938Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:14.425125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:14.425252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:14.436914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:14.560896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:14.609519Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:643:2545] 2024-11-21T23:17:14.609758Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:17:14.647224Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:17:14.647453Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:17:14.648912Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:17:14.649108Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:17:14.649150Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:17:14.649416Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:17:14.697886Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:17:14.698081Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:17:14.698195Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:669:2561] 2024-11-21T23:17:14.698234Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:17:14.698284Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:17:14.698323Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:14.699839Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:646:2547] 2024-11-21T23:17:14.700011Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:17:14.708293Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:17:14.708424Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read 
sets to remove in 72075186224037888 2024-11-21T23:17:14.708952Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:636:2541], serverId# [1:656:2552], sessionId# [0:0:0] 2024-11-21T23:17:14.709198Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:14.709241Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:14.709283Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:17:14.709321Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:14.709530Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:17:14.709792Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:17:14.709892Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:17:14.711023Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:17:14.711163Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:17:14.712415Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2024-11-21T23:17:14.712476Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2024-11-21T23:17:14.712515Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2024-11-21T23:17:14.712774Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:17:14.712818Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2024-11-21T23:17:14.712904Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:17:14.712971Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:684:2567] 2024-11-21T23:17:14.712996Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T23:17:14.713019Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2024-11-21T23:17:14.713079Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:14.713979Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2024-11-21T23:17:14.714063Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2024-11-21T23:17:14.714165Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:17:14.714198Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:14.714231Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2024-11-21T23:17:14.714258Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:17:14.714600Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:637:2542], serverId# [1:664:2559], sessionId# [0:0:0] 2024-11-21T23:17:14.714707Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T23:17:14.714897Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 
72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2024-11-21T23:17:14.714965Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2024-11-21T23:17:14.715650Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:14.715722Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T23:17:14.726857Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:17:14.726981Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:17:14.727672Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T23:17:14.727734Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2024-11-21T23:17:14.911228Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:700:2582], serverId# [1:702:2584], sessionId# [0:0:0] 2024-11-21T23:17:14.911570Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:701:2583], serverId# [1:704:2586], sessionId# [0:0:0] 2024-11-21T23:17:14.914962Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2024-11-21T23:17:14.915037Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:14.915298Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:17:14.915330Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:14.915371Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2024-11-21T23:17:14.915693Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:17:14.915886Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:17:14.916410Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:17:14.916436Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:14.916496Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2024-11-21T23:17:14.916576Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:17:14.918254Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:17:14.918698Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 
2024-11-21T23:17:14.919673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:14.919705Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:14.919741Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:17:14.919939Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:17:14.920017Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:17:14.920313Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T23:17:14.920352Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:14.920663Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:14.920725Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, Loca ... n request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:17:30.569078Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:30.570854Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:17:30.570894Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:30.570993Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2024-11-21T23:17:30.571041Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:30.571255Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:30.571287Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:30.571321Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:17:30.571569Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:17:30.571687Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:17:30.572929Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:30.572993Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2024-11-21T23:17:30.573332Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:17:30.573681Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:30.574824Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 
72075186224037889 step# 1000 txid# 281474976715657} 2024-11-21T23:17:30.574889Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2024-11-21T23:17:30.574974Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:17:30.576812Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:17:30.576872Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2024-11-21T23:17:30.576916Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2024-11-21T23:17:30.576990Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:30.577047Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:17:30.577139Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:30.577865Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:17:30.577909Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:17:30.577952Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:30.578692Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:17:30.578731Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:30.579521Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:30.579570Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:17:30.579622Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:17:30.579678Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:30.579716Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:17:30.579776Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:30.581083Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:30.581150Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T23:17:30.584514Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2024-11-21T23:17:30.584597Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2024-11-21T23:17:30.584946Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:17:30.585148Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 
coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:17:30.585825Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:17:30.585867Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:17:30.594986Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:748:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:30.595102Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:758:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:30.595190Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:30.600563Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:17:30.606475Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:30.606594Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T23:17:30.809612Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:30.809777Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2024-11-21T23:17:30.812564Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:762:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:17:30.922222Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8g8db1e55zrh2hmhj0zh53, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MWIzMzE0NzAtNDk3NWE3ZTEtMzAxZTIzYTMtMjY3Yjk5ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:30.922897Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:904:2708], serverId# [4:905:2709], sessionId# [0:0:0] 2024-11-21T23:17:30.923072Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T23:17:30.924405Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732231050924322 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T23:17:30.935425Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T23:17:30.935606Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T23:17:30.935653Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:31.011339Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8g8dnta30rdfqb8h5txs4m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDBiM2IwMDUtNzdjMmYzYzQtNmQxMDA4YzMtZDNhMTNiYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:17:31.011888Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2024-11-21T23:17:31.013077Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732231051012974 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T23:17:31.013247Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1732231051012974 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2024-11-21T23:17:31.025338Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2024-11-21T23:17:31.025534Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2024-11-21T23:17:31.025588Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:31.030135Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:939:2739], serverId# [4:940:2740], sessionId# [0:0:0] 2024-11-21T23:17:31.037368Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:941:2741], serverId# [4:942:2742], sessionId# [0:0:0] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestKinesisHttpProxy::TestWrongStream >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2024-11-21T23:17:26.425641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875695381534356:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:26.425698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023c1/r3tmp/tmpQlZXtk/pdisk_1.dat 2024-11-21T23:17:26.961405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:26.961511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:26.977300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:27.022093Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:19177 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T23:17:27.269158Z node 1 :TX_PROXY DEBUG: actor# [1:7439875695381534585:2138] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:27.269223Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875699676502317:2440] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:27.269367Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875695381534607:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:27.269412Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875695381534607:2151], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:27.269625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:27.271480Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534253:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875699676502322:2441] 2024-11-21T23:17:27.271559Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875695381534253:2051] Subscribe: subscriber# [1:7439875699676502322:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:27.271614Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534256:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875699676502323:2441] 2024-11-21T23:17:27.271632Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875695381534256:2054] Subscribe: subscriber# [1:7439875699676502323:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:27.271686Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534259:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875699676502324:2441] 2024-11-21T23:17:27.271706Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875695381534259:2057] Subscribe: subscriber# [1:7439875699676502324:2441], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:27.271772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502322:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875695381534253:2051] 2024-11-21T23:17:27.271816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502323:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875695381534256:2054] 2024-11-21T23:17:27.271838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502324:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875695381534259:2057] 2024-11-21T23:17:27.271881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875699676502319:2441] 2024-11-21T23:17:27.271908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439875699676502318:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875699676502320:2441] 2024-11-21T23:17:27.271959Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875699676502318:2441][/dc-1] Set up state: owner# [1:7439875695381534607:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:27.272061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875699676502321:2441] 2024-11-21T23:17:27.272110Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875699676502318:2441][/dc-1] Path was already updated: owner# [1:7439875695381534607:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:27.272144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502322:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875699676502319:2441], cookie# 1 2024-11-21T23:17:27.272162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502323:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875699676502320:2441], cookie# 1 2024-11-21T23:17:27.272178Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502324:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875699676502321:2441], cookie# 1 2024-11-21T23:17:27.272228Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534253:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875699676502322:2441] 2024-11-21T23:17:27.272269Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534253:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875699676502322:2441], cookie# 1 2024-11-21T23:17:27.272295Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534256:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875699676502323:2441] 2024-11-21T23:17:27.272307Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534256:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875699676502323:2441], cookie# 1 2024-11-21T23:17:27.272321Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534259:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875699676502324:2441] 2024-11-21T23:17:27.272333Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875695381534259:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875699676502324:2441], cookie# 1 2024-11-21T23:17:27.272682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502322:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875695381534253:2051], cookie# 1 2024-11-21T23:17:27.272707Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502323:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875695381534256:2054], cookie# 1 2024-11-21T23:17:27.272724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875699676502324:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875695381534259:2057], cookie# 1 2024-11-21T23:17:27.272757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875699676502319:2441], cookie# 1 2024-11-21T23:17:27.272802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:27.272820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875699676502320:2441], cookie# 1 2024-11-21T23:17:27.272841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:27.272862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875699676502321:2441], cookie# 1 2024-11-21T23:17:27.272886Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875699676502318:2441][/dc-1] Unexpected sync response: sender# [1:7439875699676502321:2441], cookie# 1 2024-11-21T23:17:27.309906Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875695381534607:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:27.310245Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875695381534607:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2024-11-21T23:17:28.594735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2024-11-21T23:17:28.594742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2024-11-21T23:17:28.594774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2024-11-21T23:17:28.594809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2024-11-21T23:17:28.594916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2024-11-21T23:17:28.594996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2024-11-21T23:17:28.595039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2024-11-21T23:17:28.597763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2024-11-21T23:17:28.598245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2024-11-21T23:17:28.598456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2024-11-21T23:17:28.600128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2024-11-21T23:17:28.600329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2024-11-21T23:17:28.600501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2024-11-21T23:17:28.600674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:28.601027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2024-11-21T23:17:28.601189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2024-11-21T23:17:28.601312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2024-11-21T23:17:28.601475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet 
reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2024-11-21T23:17:28.601584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2024-11-21T23:17:28.601694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2024-11-21T23:17:28.602512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2024-11-21T23:17:28.602686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2024-11-21T23:17:28.603842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2024-11-21T23:17:28.604776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T23:17:28.604807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2024-11-21T23:17:28.604865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2024-11-21T23:17:28.605506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T23:17:28.605548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T23:17:28.605900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2024-11-21T23:17:28.615932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2024-11-21T23:17:28.615976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2024-11-21T23:17:28.616032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2024-11-21T23:17:28.616056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2024-11-21T23:17:28.616077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2024-11-21T23:17:28.616156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2024-11-21T23:17:28.616189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2024-11-21T23:17:28.616196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2024-11-21T23:17:28.616214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2024-11-21T23:17:28.616225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2024-11-21T23:17:28.619123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2024-11-21T23:17:28.619152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046644480:3 tabletId 72075186224037890 2024-11-21T23:17:28.619198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2024-11-21T23:17:28.619206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2024-11-21T23:17:28.619227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2024-11-21T23:17:28.619245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2024-11-21T23:17:28.619316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2024-11-21T23:17:28.619368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T23:17:28.619413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2024-11-21T23:17:28.619434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2024-11-21T23:17:28.619504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2024-11-21T23:17:28.622112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2024-11-21T23:17:29.163374Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439875703119449304:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:29.163569Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875703119449304:2109], cacheItem# { Subscriber: { Subscriber: [3:7439875703119449421:2187] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:29.163705Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875707414416919:2309], recipient# [3:7439875707414416918:2292], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:30.166153Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439875703119449304:2109], request# 
{ ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:30.166276Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875703119449304:2109], cacheItem# { Subscriber: { Subscriber: [3:7439875703119449421:2187] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:30.166381Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875711709384217:2310], recipient# [3:7439875711709384216:2293], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> TestKinesisHttpProxy::DoubleCreateStream >> TestKinesisHttpProxy::ListShards >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |86.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> TStorageTenantTest::DeclareAndDefine [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin-ColumnStore >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> KqpJoinOrder::Chain65Nodes >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TStorageTenantTest::GenericCases [GOOD] >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 11093051060205138737 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2024-11-21T23:17:01.137113Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:01.142128Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:01.148442Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:01.156534Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:01.158632Z 1 00h02m38.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:01.472219Z 1 00h02m38.210512s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:01.491107Z 1 00h02m38.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:01.528517Z 1 00h02m38.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:01.863801Z 1 00h02m38.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:02.081979Z 1 00h02m38.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:02.138353Z 1 00h02m38.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:02.362316Z 1 00h02m39.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:02.365388Z 1 00h02m39.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:02.383118Z 1 00h02m39.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:02.488991Z 1 00h02m39.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:02.503393Z 1 00h02m39.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:02.989956Z 1 00h02m39.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.007949Z 1 00h02m39.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.097696Z 1 00h02m40.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.126918Z 1 00h02m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.127733Z 1 00h02m40.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.335975Z 1 00h02m40.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.368626Z 1 00h02m40.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.651466Z 1 00h02m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.752114Z 1 00h02m40.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.783597Z 1 00h02m40.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.807981Z 1 00h02m40.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.822862Z 1 00h02m40.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.838038Z 1 00h02m40.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.904322Z 1 00h02m41.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.932974Z 1 00h02m41.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:03.933902Z 1 
00h02m41.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:04.208520Z 1 00h02m41.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:04.232998Z 1 00h02m41.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:04.517209Z 1 00h02m41.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:04.817912Z 1 00h02m41.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.041271Z 1 00h02m41.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.101132Z 1 00h02m42.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.196655Z 1 00h02m42.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.200358Z 1 00h02m42.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.241221Z 1 00h02m42.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.255977Z 1 00h02m42.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.470892Z 1 00h02m42.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.803125Z 1 00h02m42.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:05.963360Z 1 00h02m42.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.008865Z 1 00h02m43.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.060901Z 1 00h02m43.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.061707Z 1 00h02m43.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.193608Z 1 00h02m43.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.214156Z 1 00h02m43.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.237230Z 1 00h02m43.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.483514Z 1 00h02m43.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.676025Z 1 00h02m43.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.699577Z 1 00h02m43.710512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.734109Z 1 00h02m43.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.788169Z 1 00h02m43.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.839247Z 1 00h02m44.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# 
[1:5287:697] 2024-11-21T23:17:06.864723Z 1 00h02m44.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.944639Z 1 00h02m44.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:06.961000Z 1 00h02m44.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.201902Z 1 00h02m44.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.334871Z 1 00h02m44.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.365679Z 1 00h02m44.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.468159Z 1 00h02m44.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.620569Z 1 00h02m45.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.643553Z 1 00h02m45.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.657818Z 1 00h02m45.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.942550Z 1 00h02m45.510512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.963324Z 1 00h02m45.610512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:07.993823Z 1 00h02m45.810512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:08.010769Z 1 00h02m45.910512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:08.139855Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:08.140035Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:08.144219Z 1 00h02m46.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2024-11-21T23:17:09.897834Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:09.898046Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5294:704] 2024-11-21T23:17:09.901119Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:09.901299Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5294:704] 2024-11-21T23:17:09.904536Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:09.904728Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5294:704] 2024-11-21T23:17:09.909810Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:09.910214Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# 
[1:5294:704] 2024-11-21T23:17:09.910485Z 1 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:09.911889Z 2 00h05m16.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5294:704] 2024-11-21T23:17:09.926709Z 1 00h05m16.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:09.926886Z 2 00h05m16. ... 24-11-21T23:17:18.729679Z 1 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5287:697] 2024-11-21T23:17:18.729853Z 2 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5294:704] 2024-11-21T23:17:18.729913Z 3 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5301:711] 2024-11-21T23:17:18.729962Z 4 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5308:718] 2024-11-21T23:17:18.730007Z 5 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5315:725] 2024-11-21T23:17:18.730054Z 6 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5322:732] 2024-11-21T23:17:18.730101Z 7 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5329:739] 2024-11-21T23:17:18.730148Z 8 00h10m24.561024s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:18.730564Z 1 00h10m24.561024s :BS_LOAD_TEST ERROR: TabletId# 1 Generation# 4 recieved not OK, msg# TEvBlockResult {Status# ERROR ErrorReason# "Status# ERROR From# [82000000:1:0:2:0] NodeId# 3 QuorumTracker# {Erroneous# 00000111 Successful# 00000000}"} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2024-11-21T23:17:21.635505Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:21.640877Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:21.654502Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:21.661406Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:21.661997Z 8 00h20m55.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:21.790880Z 8 00h20m55.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:21.805863Z 8 00h20m55.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:21.835296Z 8 00h20m55.452048s 
:BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.096389Z 8 00h20m55.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.184122Z 8 00h20m55.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.198206Z 8 00h20m55.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.241082Z 8 00h20m56.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.242304Z 8 00h20m56.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.261056Z 8 00h20m56.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.372465Z 8 00h20m56.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.390061Z 8 00h20m56.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.422511Z 8 00h20m56.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.614934Z 8 00h20m56.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.806979Z 8 00h20m56.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.830982Z 8 00h20m57.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.832637Z 8 00h20m57.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.849924Z 8 00h20m57.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:22.988723Z 8 00h20m57.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.005436Z 8 00h20m57.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.018087Z 8 00h20m57.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.037021Z 8 00h20m57.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.059050Z 8 00h20m57.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.328797Z 8 00h20m57.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.349060Z 8 00h20m57.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.433311Z 8 00h20m58.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.434917Z 8 00h20m58.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.471926Z 8 00h20m58.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.490284Z 8 00h20m58.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 
2024-11-21T23:17:23.541592Z 8 00h20m58.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.564972Z 8 00h20m58.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.614744Z 8 00h20m58.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.638130Z 8 00h20m58.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.848221Z 8 00h20m59.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:23.849810Z 8 00h20m59.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.179705Z 8 00h20m59.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.245179Z 8 00h20m59.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.427070Z 8 00h20m59.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.442477Z 8 00h20m59.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.471354Z 8 00h20m59.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.558720Z 8 00h20m59.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.638270Z 8 00h21m00.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.640468Z 8 00h21m00.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.763637Z 8 00h21m00.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:24.923435Z 8 00h21m00.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.031441Z 8 00h21m00.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.069519Z 8 00h21m00.652048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.202422Z 8 00h21m00.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.222808Z 8 00h21m00.952048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.304178Z 8 00h21m01.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.305685Z 8 00h21m01.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.321647Z 8 00h21m01.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.398913Z 8 00h21m01.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.414254Z 8 00h21m01.352048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.431712Z 8 00h21m01.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable 
in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.444798Z 8 00h21m01.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.478870Z 8 00h21m01.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.727064Z 8 00h21m01.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.823048Z 8 00h21m02.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:25.954349Z 8 00h21m02.152048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:26.088043Z 8 00h21m02.252048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:26.183438Z 8 00h21m02.452048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:26.199551Z 8 00h21m02.552048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:26.369678Z 8 00h21m02.752048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:26.470250Z 8 00h21m02.852048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:26.582021Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:26.582385Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] 2024-11-21T23:17:26.584927Z 8 00h21m03.052048s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5336:746] >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> KqpJoin::LeftJoinWithNull+StreamLookupJoin >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2024-11-21T23:17:28.217014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875702166707310:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:28.217217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00238e/r3tmp/tmpczrQH7/pdisk_1.dat 2024-11-21T23:17:28.556702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:28.556864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:28.562091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:28.582011Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4487 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T23:17:28.754194Z node 1 :TX_PROXY DEBUG: actor# [1:7439875702166707321:2113] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:28.754324Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875702166707782:2427] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:28.754596Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875702166707344:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:28.754677Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875702166707344:2127], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:28.755020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:28.757325Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707027:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875702166707787:2428] 2024-11-21T23:17:28.757331Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707030:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875702166707788:2428] 2024-11-21T23:17:28.757400Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707030:2053] Subscribe: subscriber# [1:7439875702166707788:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:28.757401Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707027:2050] Subscribe: subscriber# [1:7439875702166707787:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:28.757462Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707033:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875702166707789:2428] 2024-11-21T23:17:28.757483Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707033:2056] Subscribe: subscriber# [1:7439875702166707789:2428], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:28.757514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875702166707787:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702166707027:2050] 2024-11-21T23:17:28.757550Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875702166707788:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702166707030:2053] 2024-11-21T23:17:28.757580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875702166707789:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702166707033:2056] 2024-11-21T23:17:28.757597Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707027:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875702166707787:2428] 2024-11-21T23:17:28.757620Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702166707784:2428] 2024-11-21T23:17:28.757624Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707030:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875702166707788:2428] 2024-11-21T23:17:28.757637Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707033:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875702166707789:2428] 2024-11-21T23:17:28.757670Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702166707785:2428] 2024-11-21T23:17:28.757746Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875702166707783:2428][/dc-1] Set up state: owner# [1:7439875702166707344:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:28.757859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702166707786:2428] 2024-11-21T23:17:28.757921Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875702166707783:2428][/dc-1] Path was already updated: owner# [1:7439875702166707344:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:28.757986Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875702166707787:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875702166707784:2428], cookie# 1 2024-11-21T23:17:28.758003Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875702166707788:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875702166707785:2428], cookie# 1 2024-11-21T23:17:28.758034Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875702166707789:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875702166707786:2428], cookie# 1 2024-11-21T23:17:28.762483Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707027:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875702166707787:2428], cookie# 1 2024-11-21T23:17:28.762524Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707030:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875702166707788:2428], cookie# 1 2024-11-21T23:17:28.762541Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707033:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875702166707789:2428], cookie# 1 2024-11-21T23:17:28.762571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875702166707787:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702166707027:2050], cookie# 1 2024-11-21T23:17:28.762586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439875702166707788:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702166707030:2053], cookie# 1 2024-11-21T23:17:28.762599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875702166707789:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702166707033:2056], cookie# 1 2024-11-21T23:17:28.762634Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702166707784:2428], cookie# 1 2024-11-21T23:17:28.762690Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:28.762707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702166707785:2428], cookie# 1 2024-11-21T23:17:28.762732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:28.762775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702166707786:2428], cookie# 1 2024-11-21T23:17:28.762795Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875702166707783:2428][/dc-1] Unexpected sync response: sender# [1:7439875702166707786:2428], cookie# 1 2024-11-21T23:17:28.838903Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875702166707344:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:28.839429Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875702166707344:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... inInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:31.709990Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875715051610477:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:31.710339Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707027:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7439875715051610481:3026] 2024-11-21T23:17:31.710354Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707027:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2024-11-21T23:17:31.710421Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707027:2050] Subscribe: subscriber# [1:7439875715051610481:3026], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:31.710460Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707030:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7439875715051610482:3026] 2024-11-21T23:17:31.710477Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707030:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2024-11-21T23:17:31.710504Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707030:2053] Subscribe: subscriber# [1:7439875715051610482:3026], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:31.710534Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707033:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7439875715051610483:3026] 2024-11-21T23:17:31.710543Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707033:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2024-11-21T23:17:31.710563Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702166707033:2056] Subscribe: subscriber# [1:7439875715051610483:3026], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:31.710610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875715051610481:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875702166707027:2050] 2024-11-21T23:17:31.710677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439875715051610482:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875702166707030:2053] 2024-11-21T23:17:31.710704Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875715051610483:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875702166707033:2056] 2024-11-21T23:17:31.710770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875715051610477:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875715051610478:3026] 2024-11-21T23:17:31.710785Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707027:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875715051610481:3026] 2024-11-21T23:17:31.710807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875715051610477:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875715051610479:3026] 2024-11-21T23:17:31.710816Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707030:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875715051610482:3026] 2024-11-21T23:17:31.710828Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875715051610477:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7439875702166707344:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:31.710833Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702166707033:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875715051610483:3026] 2024-11-21T23:17:31.710869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875715051610477:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7439875715051610480:3026] 2024-11-21T23:17:31.710893Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875715051610477:3026][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7439875702166707344:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:31.710905Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875702166707344:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2024-11-21T23:17:31.710994Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875702166707344:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7439875715051610477:3026] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T23:17:31.711098Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875702166707344:2127], cacheItem# { Subscriber: { Subscriber: [1:7439875715051610477:3026] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:31.711200Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875715051610484:3027], recipient# [1:7439875715051610475:2303], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.234089Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875702166707344:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.234288Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875702166707344:2127], cacheItem# { Subscriber: { Subscriber: [1:7439875706461675402:2679] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:32.234434Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875719346577794:3031], recipient# [1:7439875719346577793:2304], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.711140Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875702166707344:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.711258Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875702166707344:2127], cacheItem# { Subscriber: { Subscriber: [1:7439875715051610477:3026] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:32.711338Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875719346577802:3032], recipient# [1:7439875719346577801:2305], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2024-11-21T23:17:27.994437Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875699604032749:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:27.999103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00239b/r3tmp/tmpfwQjGZ/pdisk_1.dat 2024-11-21T23:17:28.360532Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:28.396166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:28.396293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:28.399355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63108 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T23:17:28.626635Z node 1 :TX_PROXY DEBUG: actor# [1:7439875703899000308:2137] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:28.626718Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875703899000739:2433] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:28.626882Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875703899000330:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:28.626930Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875703899000330:2150], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:28.627124Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:28.629152Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032681:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875703899000744:2434] 2024-11-21T23:17:28.629231Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875699604032681:2050] Subscribe: subscriber# [1:7439875703899000744:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:28.629304Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032684:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875703899000745:2434] 2024-11-21T23:17:28.629322Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875699604032684:2053] Subscribe: subscriber# [1:7439875703899000745:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:28.629381Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032687:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875703899000746:2434] 2024-11-21T23:17:28.629405Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875699604032687:2056] Subscribe: subscriber# [1:7439875703899000746:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:28.629457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000744:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875699604032681:2050] 2024-11-21T23:17:28.629482Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000745:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875699604032684:2053] 2024-11-21T23:17:28.629517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000746:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875699604032687:2056] 2024-11-21T23:17:28.629560Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875703899000741:2434] 2024-11-21T23:17:28.629588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439875703899000740:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875703899000742:2434] 2024-11-21T23:17:28.629661Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875703899000740:2434][/dc-1] Set up state: owner# [1:7439875703899000330:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:28.629767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875703899000743:2434] 2024-11-21T23:17:28.629811Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875703899000740:2434][/dc-1] Path was already updated: owner# [1:7439875703899000330:2150], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:28.629863Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000744:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875703899000741:2434], cookie# 1 2024-11-21T23:17:28.629884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000745:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875703899000742:2434], cookie# 1 2024-11-21T23:17:28.629898Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000746:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875703899000743:2434], cookie# 1 2024-11-21T23:17:28.629923Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032681:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875703899000744:2434] 2024-11-21T23:17:28.629952Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032681:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875703899000744:2434], cookie# 1 2024-11-21T23:17:28.629970Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032684:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875703899000745:2434] 2024-11-21T23:17:28.629983Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032684:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875703899000745:2434], cookie# 1 2024-11-21T23:17:28.629995Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032687:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875703899000746:2434] 2024-11-21T23:17:28.630007Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032687:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875703899000746:2434], cookie# 1 2024-11-21T23:17:28.630466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000744:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875699604032681:2050], cookie# 1 2024-11-21T23:17:28.630489Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000745:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875699604032684:2053], cookie# 1 2024-11-21T23:17:28.630536Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875703899000746:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875699604032687:2056], cookie# 1 2024-11-21T23:17:28.630582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875703899000741:2434], cookie# 1 2024-11-21T23:17:28.630612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:28.630652Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875703899000742:2434], cookie# 1 2024-11-21T23:17:28.630671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:28.630695Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875703899000743:2434], cookie# 1 2024-11-21T23:17:28.630708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875703899000740:2434][/dc-1] Unexpected sync response: sender# [1:7439875703899000743:2434], cookie# 1 2024-11-21T23:17:28.696228Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875703899000330:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:28.696650Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875703899000330:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 716783903566:3128][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439875699604032687:2056] 2024-11-21T23:17:31.221971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716783903555:3128][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439875716783903558:3128] 2024-11-21T23:17:31.221999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716783903555:3128][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439875716783903556:3128] 2024-11-21T23:17:31.222027Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875716783903555:3128][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7439875703899000330:2150], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:31.222050Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716783903555:3128][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7439875716783903560:3128] 2024-11-21T23:17:31.222070Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875716783903555:3128][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7439875703899000330:2150], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:31.222093Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875699604032687:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7439875716783903566:3128] 2024-11-21T23:17:31.222129Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875703899000330:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2024-11-21T23:17:31.222175Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875703899000330:2150], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7439875716783903555:3128] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T23:17:31.223056Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875703899000330:2150], cacheItem# { Subscriber: { Subscriber: [1:7439875716783903555:3128] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:31.223152Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875716783903568:3129], recipient# [1:7439875716783903553:2306], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.012197Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875703899000330:2150], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.012307Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875703899000330:2150], cacheItem# { Subscriber: { Subscriber: [1:7439875703899001032:2655] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:32.012376Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875721078870881:3133], recipient# [1:7439875721078870880:2307], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.223749Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875703899000330:2150], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.226886Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875703899000330:2150], cacheItem# { Subscriber: { Subscriber: [1:7439875716783903541:3125] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:32.227034Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875721078870886:3134], recipient# [1:7439875721078870885:2308], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.995452Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875699604032749:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:32.995559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:33.013426Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875703899000330:2150], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:33.013578Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875703899000330:2150], cacheItem# { Subscriber: { Subscriber: [1:7439875703899001032:2655] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:33.013665Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875725373838201:3140], recipient# [1:7439875725373838200:2309], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:33.231171Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875703899000330:2150], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:33.231341Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7439875703899000330:2150], cacheItem# { Subscriber: { Subscriber: [1:7439875716783903541:3125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:33.231434Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875725373838206:3141], recipient# [1:7439875725373838205:2310], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [FAIL] Test command err: assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp:149, virtual void NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldCreateGetCheckpoints::Execute_(NUnitTest::TTestContext &): (getResult.second.Empty())
: Error: GRpc error: (14): Socket closed
: Error: Grpc error response on endpoint ghrun-uc25mmfz34.auto.internal:26837 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldCreateGetCheckpoints::Execute_(NUnitTest::TTestContext&)+2584 (0xFA6A1B8) std::__y1::__function::__func, void ()>::operator()()+280 (0xFAA6878) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTCheckpointStorageTest::TCurrentTest::Execute()+1204 (0xFAA5A44) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7FCC70F3AD90) __libc_start_main+128 (0x7FCC70F3AE40) _start+41 (0xDAB6029) assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp:35, TCheckpointStoragePtr NFq::(anonymous namespace)::GetCheckpointStorage(const char *, IEntityIdGenerator::TPtr): (issues.Empty())
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26837: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26837 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) ??+0 (0xFA61804) NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldGetCheckpointsEmpty::Execute_(NUnitTest::TTestContext&)+553 (0xFA7BB09) std::__y1::__function::__func, void ()>::operator()()+280 (0xFAA6878) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTCheckpointStorageTest::TCurrentTest::Execute()+1204 (0xFAA5A44) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7FCC70F3AD90) __libc_start_main+128 (0x7FCC70F3AE40) _start+41 (0xDAB6029) assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp:35, TCheckpointStoragePtr NFq::(anonymous namespace)::GetCheckpointStorage(const char *, IEntityIdGenerator::TPtr): (issues.Empty())
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26837: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26837 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) ??+0 (0xFA61804) NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldDeleteGraph::Execute_(NUnitTest::TTestContext&)+954 (0xFA7D39A) std::__y1::__function::__func, void ()>::operator()()+280 (0xFAA6878) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTCheckpointStorageTest::TCurrentTest::Execute()+1204 (0xFAA5A44) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7FCC70F3AD90) __libc_start_main+128 (0x7FCC70F3AE40) _start+41 (0xDAB6029) assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp:35, TCheckpointStoragePtr NFq::(anonymous namespace)::GetCheckpointStorage(const char *, IEntityIdGenerator::TPtr): (issues.Empty())
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:26837: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26837 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) ??+0 (0xFA61804) NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldDeleteMarkedCheckpoints::Execute_(NUnitTest::TTestContext&)+776 (0xFA8B378) std::__y1::__function::__func, void ()>::operator()()+280 (0xFAA6878) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTCheckpointStorageTest::TCurrentTest::Execute()+1204 (0xFAA5A44) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7FCC70F3AD90) __libc_start_main+128 (0x7FCC70F3AE40) _start+41 (0xDAB6029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [FAIL] Test command err: assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp:218, virtual void NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldMarkCheckpointsGc::Execute_(NUnitTest::TTestContext &): (getResult.first.size() == 4UL) failed: (0 != 4) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldMarkCheckpointsGc::Execute_(NUnitTest::TTestContext&)+5516 (0xFA858AC) std::__y1::__function::__func, void ()>::operator()()+280 (0xFAA6878) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTCheckpointStorageTest::TCurrentTest::Execute()+1204 (0xFAA5A44) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7F676E9E4D90) __libc_start_main+128 (0x7F676E9E4E40) _start+41 (0xDAB6029) assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp:35, TCheckpointStoragePtr NFq::(anonymous namespace)::GetCheckpointStorage(const char *, IEntityIdGenerator::TPtr): (issues.Empty())
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19433: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19433 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) ??+0 (0xFA61804) NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldNotDeleteUnmarkedCheckpoints::Execute_(NUnitTest::TTestContext&)+733 (0xFA9115D) std::__y1::__function::__func, void ()>::operator()()+280 (0xFAA6878) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTCheckpointStorageTest::TCurrentTest::Execute()+1204 (0xFAA5A44) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7F676E9E4D90) __libc_start_main+128 (0x7F676E9E4E40) _start+41 (0xDAB6029) assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp:35, TCheckpointStoragePtr NFq::(anonymous namespace)::GetCheckpointStorage(const char *, IEntityIdGenerator::TPtr): (issues.Empty())
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19433: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:19433 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) ??+0 (0xFA61804) NFq::NTestSuiteTCheckpointStorageTest::TTestCaseShouldRetryOnExistingGraphDescId::Execute_(NUnitTest::TTestContext&)+2135 (0xFA9A057) std::__y1::__function::__func, void ()>::operator()()+280 (0xFAA6878) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTCheckpointStorageTest::TCurrentTest::Execute()+1204 (0xFAA5A44) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7F676E9E4D90) __libc_start_main+128 (0x7F676E9E4E40) _start+41 (0xDAB6029) >> SystemView::QueryStatsAllTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2024-11-21T23:17:29.096252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875706603384442:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:29.096462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00238b/r3tmp/tmpfJP5KW/pdisk_1.dat 2024-11-21T23:17:29.545010Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:29.550410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:29.550514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:29.565278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12591 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T23:17:29.765245Z node 1 :TX_PROXY DEBUG: actor# [1:7439875706603384519:2138] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:29.765310Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875706603384950:2435] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:29.765455Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875706603384542:2151], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:29.765490Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875706603384542:2151], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:29.766073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:29.771904Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416891:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875706603384955:2436] 2024-11-21T23:17:29.771977Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702308416891:2051] Subscribe: subscriber# [1:7439875706603384955:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:29.772061Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416897:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875706603384957:2436] 2024-11-21T23:17:29.772080Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702308416897:2057] Subscribe: subscriber# [1:7439875706603384957:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:29.772142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875706603384955:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702308416891:2051] 2024-11-21T23:17:29.772175Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875706603384957:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702308416897:2057] 2024-11-21T23:17:29.772236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875706603384952:2436] 2024-11-21T23:17:29.772267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875706603384954:2436] 2024-11-21T23:17:29.772322Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875706603384951:2436][/dc-1] Set up state: owner# [1:7439875706603384542:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:29.772444Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416894:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: 
sender# [1:7439875706603384956:2436] 2024-11-21T23:17:29.772498Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702308416894:2054] Subscribe: subscriber# [1:7439875706603384956:2436], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:29.772498Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875706603384955:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875706603384952:2436], cookie# 1 2024-11-21T23:17:29.772515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875706603384956:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875706603384953:2436], cookie# 1 2024-11-21T23:17:29.772527Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875706603384957:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875706603384954:2436], cookie# 1 2024-11-21T23:17:29.772562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875706603384956:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875702308416894:2054] 2024-11-21T23:17:29.772582Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416894:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875706603384956:2436], cookie# 1 2024-11-21T23:17:29.772600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875706603384953:2436] 2024-11-21T23:17:29.772620Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416894:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875706603384956:2436] 2024-11-21T23:17:29.772645Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416891:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875706603384955:2436] 2024-11-21T23:17:29.772662Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875706603384951:2436][/dc-1] Path was already updated: owner# [1:7439875706603384542:2151], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:29.772682Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416891:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875706603384955:2436], cookie# 1 2024-11-21T23:17:29.772703Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416897:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875706603384957:2436] 2024-11-21T23:17:29.772705Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875706603384956:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702308416894:2054], cookie# 1 2024-11-21T23:17:29.772716Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416897:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875706603384957:2436], cookie# 1 2024-11-21T23:17:29.772723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439875706603384955:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702308416891:2051], cookie# 1 2024-11-21T23:17:29.772759Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875706603384957:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875702308416897:2057], cookie# 1 2024-11-21T23:17:29.772785Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875706603384953:2436], cookie# 1 2024-11-21T23:17:29.772826Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:29.772845Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875706603384952:2436], cookie# 1 2024-11-21T23:17:29.772862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:29.772886Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875706603384954:2436], cookie# 1 2024-11-21T23:17:29.772900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875706603384951:2436][/dc-1] Unexpected sync response: sender# [1:7439875706603384954:2436], cookie# 1 2024-11-21T23:17:29.847756Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875706603384542:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:29.848153Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875706603384542:2151], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... tual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2024-11-21T23:17:31.444705Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7439875715193320089:2854], recipient# [1:7439875715193320080:2851], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) } }] } 2024-11-21T23:17:31.444740Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875715193320080:2851] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T23:17:31.444826Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875715193320080:2851] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2024-11-21T23:17:31.446496Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875715193320080:2851] Handle TEvDescribeSchemeResult Forward to# [1:7439875715193320079:2850] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1732231051300 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1732231051300 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false 
IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2024-11-21T23:17:31.503356Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416894:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439875709682081672:2098] 2024-11-21T23:17:31.503393Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702308416894:2054] Unsubscribe: subscriber# [3:7439875709682081672:2098], path# /dc-1/USER_0 2024-11-21T23:17:31.503432Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416891:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439875709682081670:2098] 2024-11-21T23:17:31.503473Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702308416891:2051] Unsubscribe: subscriber# [3:7439875709682081670:2098], path# /dc-1/USER_0 2024-11-21T23:17:31.506333Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875702308416897:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7439875709682081673:2098] 2024-11-21T23:17:31.506362Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875702308416897:2057] Unsubscribe: subscriber# [3:7439875709682081673:2098], path# /dc-1/USER_0 2024-11-21T23:17:31.519147Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2024-11-21T23:17:31.522525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:17:31.669847Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439875709682081714:2113], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:31.669984Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875709682081714:2113], cacheItem# { Subscriber: { Subscriber: [3:7439875709682081805:2167] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:31.670122Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875713977049419:2346], recipient# [3:7439875713977049418:2298], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.671259Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439875709682081714:2113], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: 
OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.671340Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875709682081714:2113], cacheItem# { Subscriber: { Subscriber: [3:7439875709682081805:2167] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:32.671404Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875718272016721:2347], recipient# [3:7439875718272016720:2299], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup >> TStorageTenantTest::LsLs [GOOD] >> KqpJoin::TwoJoinsWithQueryService |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> KqpJoin::RightSemiJoin_SimpleKey >> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive >> KqpJoinOrder::TPCH3-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::QueryStatsAllTables [GOOD] Test command err: 2024-11-21T23:15:44.687683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875256945027154:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:44.687765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0018fb/r3tmp/tmpkF8ej2/pdisk_1.dat 2024-11-21T23:15:45.713820Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:46.433997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:46.434080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:46.448292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:46.478614Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19930, node 1 2024-11-21T23:15:46.531155Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:46.531197Z node 1 
:GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:15:46.718150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:47.014379Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:47.014416Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:47.014429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:47.014516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:47.840247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:47.916557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:48.165713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:49.690687Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875256945027154:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:49.690789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:50.846556Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2024-11-21T23:15:50.846599Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2024-11-21T23:15:50.897627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875282714832067:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:50.902437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:50.902856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875282714832079:2318], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:50.910186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T23:15:50.956925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875282714832081:2319], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T23:15:51.035381Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F00028AB88 2024-11-21T23:15:51.035432Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7439875282714832041:2313], queryUid: , queryText: "\n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n ", keepInCache: 0, split: 0{ TraceId: 01jd8g5bzfa85gebtffyphym05, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQxMDdiZWMtYzZkZjg3MzUtMzNmYTEyYzMtMmE1ZDgwZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2024-11-21T23:15:51.035621Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2024-11-21T23:15:51.035677Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7439875282714832041:2313], queueSize: 1 2024-11-21T23:15:51.036174Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7439875282714832041:2313], compileActor: [1:7439875287009799461:2323] 2024-11-21T23:15:51.417869Z node 1 :KQP_YQL INFO: TraceId: 01jd8g5bzfa85gebtffyphym05, SessionId: CompileActor 2024-11-21 23:15:51.416 INFO ydb-core-sys_view-ut_kqp(pid=272479, tid=0x00007F8A7A5EB640) [KQP] kqp_host.cpp:1338: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! $1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! 
$17 $2)) ) 2024-11-21T23:15:51.419604Z node 1 :KQP_YQL TRACE: TraceId: 01jd8g5bzfa85gebtffyphym05, SessionId: CompileActor 2024-11-21 23:15:51.418 TRACE ydb-core-sys_view-ut_kqp(pid=272479, tid=0x00007F8A7A5EB640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! $1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! (Commit! $17 $2) (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2024-11-21T23:15:51.420217Z node 1 :KQP_YQL DEBUG: TraceId: 01jd8g5bzfa85gebtffyphym05, SessionId: CompileActor 2024-11-21 23:15:51.420 DEBUG ydb-core-sys_view-ut_kqp(pid=272479, tid=0x00007F8A7A5EB640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 530us 2024-11-21T23:15:51.429332Z node 1 :KQP_YQL INFO: TraceId: 01jd8g5bzfa85gebtffyphym05, SessionId: CompileActor 2024-11-21 23:15:51.429 INFO ydb-core-sys_view-ut_kqp(pid=272479, tid=0x00007F8A7A5EB640) [RESULT] yql_result_provider.cpp:1416: RewriteIO 2024-11-21T23:15:51.436234Z node 1 :KQP_YQL DEBUG: TraceId: 01jd8g5bzfa85gebtffyphym05, SessionId: CompileActor 2024-11-21 23:15:51.436 DEBUG ydb-core-sys_view-ut_kqp(pid=272479, tid=0x00007F8A7A5EB640) ... :17.311423Z node 46 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:17.311600Z node 46 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27032 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:17.925167Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:17.963987Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:21.797830Z node 46 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[46:7439875652213702470:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:21.797994Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:23.456386Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7439875682278474548:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:23.456386Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [46:7439875682278474556:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:23.456476Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:23.460192Z node 46 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:17:23.511056Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [46:7439875682278474562:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:17:23.789583Z node 46 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8g86by06yym2z1yqtyhhga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=YTA0OGQ3ZDktZTYzZTFhY2MtNTY3ZWMwNzYtZjUyZjQzNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:23.952102Z node 46 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8g86q1chaffkdm7mp1yr0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=46&id=YjZmM2Y0MjAtZjNlMGI0MTYtYzE0YThlNTktNWI4OWM1NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:23.958738Z node 46 :SYSTEM_VIEWS INFO: Scan started, actor: [46:7439875682278474717:2334], owner: [46:7439875682278474713:2332], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_minute] 2024-11-21T23:17:23.959260Z node 46 :SYSTEM_VIEWS INFO: Scan prepared, actor: [46:7439875682278474717:2334], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:17:23.959870Z node 46 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [46:7439875682278474717:2334], row count: 1, finished: 1 2024-11-21T23:17:23.959928Z node 46 :SYSTEM_VIEWS INFO: Scan finished, actor: [46:7439875682278474717:2334], owner: [46:7439875682278474713:2332], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_minute] 2024-11-21T23:17:23.962633Z node 46 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231043949, txId: 281474976715662] shutting down 2024-11-21T23:17:26.758471Z node 51 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[51:7439875694489688720:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:26.759327Z node 51 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0018fb/r3tmp/tmpoOdp4n/pdisk_1.dat 2024-11-21T23:17:27.082938Z node 51 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:27.138202Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:27.138353Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:27.148963Z node 51 :HIVE WARN: HIVE#72057594037968897 Node(51, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30250, node 51 2024-11-21T23:17:27.288124Z node 51 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:27.288182Z node 51 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:27.288196Z node 51 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:27.288376Z node 51 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24978 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:27.821241Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:27.857665Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:31.763254Z node 51 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[51:7439875694489688720:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:31.763440Z node 51 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:33.274833Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [51:7439875724554460736:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:33.275010Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:33.275575Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [51:7439875724554460748:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:33.289003Z node 51 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:17:33.325141Z node 51 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [51:7439875724554460750:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:17:33.732945Z node 51 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jd8g8fyq05xj6fdcybw2j6k7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=51&id=MjMwMjAwMjgtYTkxNTZiOGEtNTU4MjU4OWQtNzFkMjA0YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:34.008778Z node 51 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jd8g8gdpcs19dpkxvjdcw5ky, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=51&id=MjYwOTRmZjUtZDNlMjE0NjktZTRkMWVhYWEtOGQ5MWQyYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:34.011909Z node 51 :SYSTEM_VIEWS INFO: Scan started, actor: [51:7439875728849428197:2333], owner: [51:7439875728849428193:2331], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2024-11-21T23:17:34.013554Z node 51 :SYSTEM_VIEWS INFO: Scan prepared, actor: [51:7439875728849428197:2333], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:17:34.014137Z node 51 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [51:7439875728849428197:2333], row count: 1, finished: 1 2024-11-21T23:17:34.014174Z node 51 :SYSTEM_VIEWS INFO: Scan finished, actor: [51:7439875728849428197:2333], owner: [51:7439875728849428193:2331], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2024-11-21T23:17:34.017802Z node 51 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231054006, txId: 281474976710662] shutting down |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2024-11-21T23:17:31.364337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875716611616141:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:31.364395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00235e/r3tmp/tmpoGGGI3/pdisk_1.dat 2024-11-21T23:17:31.703694Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:31.889260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:31.889405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:31.890954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:31.891021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:31.896472Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:31.898255Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:17:31.898387Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:31.900606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14520 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T23:17:32.184401Z node 1 :TX_PROXY DEBUG: actor# [1:7439875716611616360:2139] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:32.184470Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875720906584101:2451] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:32.184607Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875716611616407:2163], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:32.184646Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875716611616407:2163], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:32.184848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:32.187078Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875716611616026:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875720906584106:2452] 2024-11-21T23:17:32.187163Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875716611616026:2052] Subscribe: subscriber# [1:7439875720906584106:2452], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:32.187162Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875716611616029:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875720906584107:2452] 2024-11-21T23:17:32.187205Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875716611616029:2055] Subscribe: subscriber# [1:7439875720906584107:2452], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:32.187219Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875716611616032:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875720906584108:2452] 2024-11-21T23:17:32.187248Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875716611616032:2058] Subscribe: subscriber# [1:7439875720906584108:2452], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:32.187296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584106:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875716611616026:2052] 2024-11-21T23:17:32.187332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584107:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875716611616029:2055] 2024-11-21T23:17:32.187371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584108:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875716611616032:2058] 2024-11-21T23:17:32.187438Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7439875716611616026:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875720906584106:2452] 2024-11-21T23:17:32.187460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875720906584103:2452] 2024-11-21T23:17:32.187483Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875716611616029:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875720906584107:2452] 2024-11-21T23:17:32.187497Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875716611616032:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875720906584108:2452] 2024-11-21T23:17:32.187508Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875720906584104:2452] 2024-11-21T23:17:32.187559Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875720906584102:2452][/dc-1] Set up state: owner# [1:7439875716611616407:2163], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:32.187686Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875720906584105:2452] 2024-11-21T23:17:32.187734Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875720906584102:2452][/dc-1] Path was already updated: owner# [1:7439875716611616407:2163], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:32.187775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584106:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875720906584103:2452], cookie# 1 2024-11-21T23:17:32.187792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584107:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875720906584104:2452], cookie# 1 2024-11-21T23:17:32.187806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584108:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875720906584105:2452], cookie# 1 2024-11-21T23:17:32.192143Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875716611616026:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875720906584106:2452], cookie# 1 2024-11-21T23:17:32.192193Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875716611616029:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875720906584107:2452], cookie# 1 2024-11-21T23:17:32.192227Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875716611616032:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875720906584108:2452], cookie# 1 
2024-11-21T23:17:32.192299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584106:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875716611616026:2052], cookie# 1 2024-11-21T23:17:32.192316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584107:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875716611616029:2055], cookie# 1 2024-11-21T23:17:32.192329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875720906584108:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875716611616032:2058], cookie# 1 2024-11-21T23:17:32.192360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875720906584103:2452], cookie# 1 2024-11-21T23:17:32.192389Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:32.192403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875720906584104:2452], cookie# 1 2024-11-21T23:17:32.192429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:32.192481Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875720906584105:2452], cookie# 1 2024-11-21T23:17:32.192497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875720906584102:2452][/dc-1] Unexpected sync response: sender# [1:7439875720906584105:2452], cookie# 1 2024-11-21T23:17:32.287502Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875716611616407:2163], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:32.287841Z node 1 :TX_PROXY_SC ... 
rsion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:36.312266Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875713363604298:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875730543473537:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:36.312369Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875734838440866:2136], recipient# [2:7439875730543473534:2279], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:36.312568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439875730543473534:2279], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:36.372637Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875713363604298:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:36.372806Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875713363604298:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875730543473536:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:36.372866Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875713363604298:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875730543473537:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:36.373002Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875734838440867:2137], recipient# [2:7439875730543473534:2279], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:36.373180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439875730543473534:2279], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:36.449492Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875713363604298:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:36.449619Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875713363604298:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875730543473536:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:36.449663Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875713363604298:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875730543473537:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:36.449768Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875734838440870:2138], recipient# [2:7439875730543473534:2279], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:36.449934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439875730543473534:2279], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:36.509801Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875713363604298:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:36.509932Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875713363604298:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875730543473536:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:36.509979Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875713363604298:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875730543473537:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:36.510084Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875734838440871:2139], recipient# [2:7439875730543473534:2279], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:36.510293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7439875730543473534:2279], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> KqpFlipJoin::RightSemi_1 [GOOD] >> KqpFlipJoin::RightSemi_2 >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] >> TestKinesisHttpProxy::TestPing [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> TPersQueueMirrorer::ValidStartStream >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TestKinesisHttpProxy::TestRequestBadJson |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2024-11-21T23:17:29.237049Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875708137675038:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:29.237961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002370/r3tmp/tmpbw85hO/pdisk_1.dat 2024-11-21T23:17:29.670588Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:29.696321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:29.696404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:29.713306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8551 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2024-11-21T23:17:29.954064Z node 1 :TX_PROXY DEBUG: actor# [1:7439875708137675256:2092] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:29.954131Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875708137675756:2433] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:29.954224Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875708137675288:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:29.954253Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875708137675288:2114], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:29.954437Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:29.956353Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137674998:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875708137675761:2434] 2024-11-21T23:17:29.956421Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875708137674998:2049] Subscribe: subscriber# [1:7439875708137675761:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:29.956496Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137675001:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875708137675762:2434] 2024-11-21T23:17:29.956513Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875708137675001:2052] Subscribe: subscriber# [1:7439875708137675762:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:29.956532Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137675004:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875708137675763:2434] 2024-11-21T23:17:29.956545Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875708137675004:2055] Subscribe: subscriber# [1:7439875708137675763:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:29.956612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675761:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875708137674998:2049] 2024-11-21T23:17:29.956659Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675762:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875708137675001:2052] 2024-11-21T23:17:29.956692Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675763:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875708137675004:2055] 2024-11-21T23:17:29.956735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875708137675758:2434] 2024-11-21T23:17:29.956769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7439875708137675757:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875708137675759:2434] 2024-11-21T23:17:29.956837Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875708137675757:2434][/dc-1] Set up state: owner# [1:7439875708137675288:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:29.956930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875708137675760:2434] 2024-11-21T23:17:29.956981Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875708137675757:2434][/dc-1] Path was already updated: owner# [1:7439875708137675288:2114], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:29.957015Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675761:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875708137675758:2434], cookie# 1 2024-11-21T23:17:29.957032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675762:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875708137675759:2434], cookie# 1 2024-11-21T23:17:29.957042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675763:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875708137675760:2434], cookie# 1 2024-11-21T23:17:29.957075Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137674998:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875708137675761:2434] 2024-11-21T23:17:29.957095Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137674998:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875708137675761:2434], cookie# 1 2024-11-21T23:17:29.957111Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137675001:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875708137675762:2434] 2024-11-21T23:17:29.957156Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137675001:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875708137675762:2434], cookie# 1 2024-11-21T23:17:29.957180Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137675004:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875708137675763:2434] 2024-11-21T23:17:29.957192Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875708137675004:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875708137675763:2434], cookie# 1 2024-11-21T23:17:29.958524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675761:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875708137674998:2049], cookie# 1 2024-11-21T23:17:29.958545Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675762:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875708137675001:2052], cookie# 1 2024-11-21T23:17:29.958558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875708137675763:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875708137675004:2055], cookie# 1 2024-11-21T23:17:29.958608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875708137675758:2434], cookie# 1 2024-11-21T23:17:29.958638Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:29.958653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875708137675759:2434], cookie# 1 2024-11-21T23:17:29.958680Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:29.958702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875708137675760:2434], cookie# 1 2024-11-21T23:17:29.958718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875708137675757:2434][/dc-1] Unexpected sync response: sender# [1:7439875708137675760:2434], cookie# 1 2024-11-21T23:17:30.018590Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875708137675288:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:30.018915Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875708137675288:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... sActor] ActorId: [4:7439875739321782598:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:37.923549Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439875722141912375:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:37.923681Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875722141912375:2100], cacheItem# { Subscriber: { Subscriber: [4:7439875739321782600:2779] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:37.923732Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875722141912375:2100], cacheItem# { Subscriber: { Subscriber: [4:7439875739321782601:2780] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:37.923836Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439875739321782624:2785], recipient# [4:7439875739321782598:2343], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:37.924215Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7439875739321782598:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:38.009399Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439875722141912375:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:38.009522Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875722141912375:2100], cacheItem# { Subscriber: { Subscriber: [4:7439875726436880465:2615] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:38.009603Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439875743616749922:2786], recipient# [4:7439875743616749921:2345], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:38.015495Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439875722141912375:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:38.015659Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875722141912375:2100], cacheItem# { Subscriber: { Subscriber: [4:7439875739321782600:2779] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:38.015730Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875722141912375:2100], cacheItem# { Subscriber: { Subscriber: [4:7439875739321782601:2780] DomainOwnerId: 72057594046644480 
Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:38.015856Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439875743616749923:2787], recipient# [4:7439875739321782598:2343], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:38.016453Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7439875739321782598:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:38.068219Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7439875722141912375:2100], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:38.068381Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875722141912375:2100], cacheItem# { Subscriber: { Subscriber: [4:7439875739321782600:2779] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:38.068460Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7439875722141912375:2100], cacheItem# { Subscriber: { Subscriber: [4:7439875739321782601:2780] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:38.068596Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7439875743616749924:2788], recipient# [4:7439875739321782598:2343], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:38.068822Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7439875739321782598:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TSchemeShardAuditSettings::AlterSubdomain >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> TestKinesisHttpProxy::GoodRequestGetRecords >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull >> TestKinesisHttpProxy::TestWrongStream2 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> TestKinesisHttpProxy::ListShards [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> CdcStreamChangeCollector::OldImage [GOOD] >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> TestKinesisHttpProxy::TestConsumersEmptyNames >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes >> KqpLimits::ComputeActorMemoryAllocationFailure >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] >> KqpJoin::LeftJoinWithNull-StreamLookupJoin >> KqpExplain::LimitOffset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2024-11-21T23:17:13.457881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:13.469463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:13.469722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002c4e/r3tmp/tmp0dxHvM/pdisk_1.dat 2024-11-21T23:17:13.956260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:17:14.026243Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:14.030273Z node 1 :TABLET_SAUSAGECACHE INFO: Config updated MemoryLimit: 33554432 2024-11-21T23:17:14.088263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:14.088436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:14.115479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:14.248395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:14.300030Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:17:14.300355Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:17:14.341520Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:17:14.341672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:17:14.343623Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:17:14.343737Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:17:14.343797Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:17:14.344205Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:17:14.370822Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:17:14.371074Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:17:14.371237Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:17:14.371284Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:17:14.371335Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:17:14.371380Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:14.372419Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:17:14.372573Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:17:14.372708Z node 1 :TX_DATASHARD DEBUG: Server connected 
at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:17:14.372807Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:14.372860Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:14.372918Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:17:14.372982Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:14.373174Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:17:14.373439Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:17:14.373541Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:17:14.375166Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:14.386089Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:17:14.386258Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:17:14.572675Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:17:14.583603Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:17:14.583724Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:14.584387Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:14.584451Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:14.584563Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2024-11-21T23:17:14.584909Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2024-11-21T23:17:14.585134Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:17:14.586046Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:14.586133Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2024-11-21T23:17:14.588655Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:17:14.589121Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:14.590915Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:17:14.590957Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2024-11-21T23:17:14.591497Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:17:14.591555Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:17:14.591614Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:14.592517Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:14.592553Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:17:14.592605Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:17:14.592679Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:14.592724Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:17:14.592793Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:14.596312Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:14.598246Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:17:14.598415Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2024-11-21T23:17:14.598468Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:17:14.608293Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:17:14.608463Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T23:17:14.608513Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T23:17:14.608548Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T23:17:14.632653Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:17:15.003401Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:17:15.003502Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:15.003759Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:15.003811Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:15.003863Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T23:17:15.004100Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 
2024-11-21T23:17:15.004239Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:17:15.004409Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:15.005155Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:15.021317Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T23:17:15.021415Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T23:17:15.021516Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:15.021560Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:15.021629Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:15.021713Z node 1 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:380:2375], ex ... 2024-11-21T23:17:39.100264Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2024-11-21T23:17:39.100653Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:39.102597Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2024-11-21T23:17:39.102683Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:39.103506Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1000 txid# 281474976715657} 2024-11-21T23:17:39.103582Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2024-11-21T23:17:39.103659Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:39.105118Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:39.105158Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:17:39.105218Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2024-11-21T23:17:39.105302Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:39.105367Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2024-11-21T23:17:39.105465Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:39.106749Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:39.109135Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2024-11-21T23:17:39.109325Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 
2024-11-21T23:17:39.109399Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:17:39.120711Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:17:39.120876Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2024-11-21T23:17:39.120933Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2024-11-21T23:17:39.120969Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2024-11-21T23:17:39.146418Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:17:39.517621Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 498 RawX2: 17179871629 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:17:39.517702Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:39.517907Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:39.517981Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:17:39.518047Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2024-11-21T23:17:39.518241Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2024-11-21T23:17:39.518359Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:17:39.518632Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:39.519490Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:39.539157Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 281474976715658} 2024-11-21T23:17:39.539279Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T23:17:39.539376Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:39.539421Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:39.539506Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:39.539603Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:380:2375], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:39.539678Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2024-11-21T23:17:39.539805Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:39.542315Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2024-11-21T23:17:39.543881Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got 
TEvSchemaChangedResult from SS at 72075186224037888 2024-11-21T23:17:39.561900Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:856:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.562048Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:866:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.562141Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.568401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:17:39.575197Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:39.807020Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:17:39.818723Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:870:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:17:40.356657Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8g8p33855vpz0418vzbshk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWY4NmNlNTYtZmFjNDRjZTItZTU2NjIwODctMjk0MDNhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:40.369586Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:966:2757], serverId# [4:967:2758], sessionId# [0:0:0] 2024-11-21T23:17:40.373267Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2024-11-21T23:17:40.376982Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8g8p33855vpz0418vzbshk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWY4NmNlNTYtZmFjNDRjZTItZTU2NjIwODctMjk0MDNhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:40.409041Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8g8p33855vpz0418vzbshk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWY4NmNlNTYtZmFjNDRjZTItZTU2NjIwODctMjk0MDNhOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:40.409894Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:17:40.411907Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1732231060411773 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T23:17:40.423511Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:17:40.423674Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2024-11-21T23:17:40.423803Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T23:17:40.423872Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:40.425019Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2024-11-21T23:17:40.425113Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:40.525356Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8g8pye8fs3tnaatpzws25y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzE2OWU0MDctMWZhMGYyYjMtMTNiOTc5MDgtOTgyNDJjYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:17:40.525746Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:17:40.526806Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1732231060526705 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2024-11-21T23:17:40.537999Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:17:40.538178Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2024-11-21T23:17:40.538228Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:40.540471Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1001:2782], serverId# [4:1002:2783], sessionId# [0:0:0] 2024-11-21T23:17:40.546097Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1003:2784], serverId# [4:1004:2785], sessionId# [0:0:0] >> KqpJoin::TwoJoinsWithQueryService [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2024-11-21T23:17:26.531719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875694567449894:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:26.531965Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:17:26.579674Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875693040262714:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:26.579726Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0023bd/r3tmp/tmp7Oc6Jj/pdisk_1.dat 2024-11-21T23:17:27.257309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:27.257581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:27.262180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:27.262280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:27.263640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:27.267821Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:17:27.268728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:27.269259Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:32416 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T23:17:27.580270Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875693040262945:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:27.580449Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875693040262945:2104], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:27.580479Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [2:7439875693040262945:2104], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:27.580696Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439875697335230258:2107][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:27.583277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449779:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [2:7439875697335230262:2107] 2024-11-21T23:17:27.583290Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449782:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [2:7439875697335230263:2107] 2024-11-21T23:17:27.590682Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875694567449782:2055] Subscribe: subscriber# [2:7439875697335230263:2107], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:27.590684Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875694567449779:2052] Subscribe: subscriber# [2:7439875697335230262:2107], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:27.594517Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449785:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [2:7439875697335230264:2107] 2024-11-21T23:17:27.594585Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875694567449785:2058] Subscribe: subscriber# [2:7439875697335230264:2107], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:27.597324Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449782:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:7439875697335230263:2107] 2024-11-21T23:17:27.597521Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449779:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:7439875697335230262:2107] 2024-11-21T23:17:27.597544Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449785:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:7439875697335230264:2107] 
2024-11-21T23:17:27.596317Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439875697335230263:2107][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875694567449782:2055] 2024-11-21T23:17:27.597296Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439875697335230262:2107][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875694567449779:2052] 2024-11-21T23:17:27.597390Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439875697335230264:2107][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875694567449785:2058] 2024-11-21T23:17:27.597504Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439875697335230258:2107][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [2:7439875697335230260:2107] 2024-11-21T23:17:27.597574Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439875697335230258:2107][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [2:7439875697335230259:2107] 2024-11-21T23:17:27.597633Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:7439875697335230258:2107][/dc-1] Set up state: owner# [2:7439875693040262945:2104], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:27.597745Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439875697335230258:2107][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [2:7439875697335230261:2107] 2024-11-21T23:17:27.597810Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7439875697335230258:2107][/dc-1] Path was already updated: owner# [2:7439875693040262945:2104], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:27.604985Z node 1 :TX_PROXY DEBUG: actor# [1:7439875694567450113:2139] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:27.605055Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875698862417880:2461] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:27.605239Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875694567450140:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:27.605373Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875698862417856:2449][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7439875694567450140:2153], cookie# 1 2024-11-21T23:17:27.605460Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7439875698862417860:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875698862417857:2449], cookie# 1 2024-11-21T23:17:27.605493Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875698862417861:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875698862417858:2449], cookie# 1 2024-11-21T23:17:27.605507Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875698862417862:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875698862417859:2449], cookie# 1 2024-11-21T23:17:27.605531Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449779:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875698862417860:2449], cookie# 1 2024-11-21T23:17:27.605560Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449782:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875698862417861:2449], cookie# 1 2024-11-21T23:17:27.605575Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875694567449785:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875698862417862:2449], cookie# 1 2024-11-21T23:17:27.605610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875698862417860:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875694567449779:2052], cookie# 1 2024-11-21T23:17:27.605652Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875698862417861:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875694567449782:2055], cookie# 1 2024-11-21T23:17:27.605668Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875698862417862:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875694567449785:2058], cookie# 1 2024-11-21T23:17:27.605727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875698862417856:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875698862417857:2449], cookie# 1 2024-11-21T23:17:27.605761Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875698862417856:2449][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:27.605808Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875698862417856:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875698862417858:2449], cookie# 1 2024-11-21T23:17:27.605835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875698862417856:2449][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:27.605876Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875698862417856:2449][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875698862417859:2449], cookie# 1 2024-11-21T23:17:27.605902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875698862417856:2449][/dc-1] Unexpected sync response: sender# [1:7439875698862417859:2449], cookie# 1 2024-11-21T23:17:27.605953Z node 1 :TX_ ... 
{ ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.296687Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875732019718999:2198], cacheItem# { Subscriber: { Subscriber: [3:7439875753494555565:2234] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:41.296757Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875732019718999:2198], cacheItem# { Subscriber: { Subscriber: [3:7439875753494555566:2235] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:41.296883Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875757789522885:2244], recipient# [3:7439875753494555563:2532], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.297298Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439875753494555563:2532], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:41.331728Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439875732019718999:2198], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.331865Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875732019718999:2198], cacheItem# { Subscriber: { Subscriber: [3:7439875732019719020:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:41.331950Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875757789522887:2245], recipient# [3:7439875757789522886:2535], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.369042Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439875732019718999:2198], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.369193Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875732019718999:2198], cacheItem# { Subscriber: { Subscriber: [3:7439875753494555565:2234] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:41.369254Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875732019718999:2198], cacheItem# { 
Subscriber: { Subscriber: [3:7439875753494555566:2235] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:41.369374Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875757789522888:2246], recipient# [3:7439875753494555563:2532], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.369877Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7439875753494555563:2532], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:41.681489Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875693040262945:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.681622Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875693040262945:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875710220132178:2115] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:41.681704Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875757464772512:2142], recipient# [2:7439875757464772511:2304], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.738923Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875693040262945:2104], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:41.739043Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875693040262945:2104], cacheItem# { Subscriber: { Subscriber: [2:7439875697335230266:2109] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:41.739114Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875757464772514:2143], recipient# [2:7439875757464772513:2305], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown 
Kind: KindUnknown DomainInfo }] } >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] >> KqpLimits::TooBigQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:17:40.830710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:17:40.830826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:17:40.830875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:17:40.830940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:17:40.830995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:17:40.831028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:17:40.831097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:17:40.831516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:17:40.908753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:17:40.908810Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:40.923411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:17:40.933087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:17:40.933274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:17:40.950517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:17:40.952014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:17:40.952924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:17:40.953370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:17:40.960037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:17:40.961590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:17:40.961662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:17:40.961935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:17:40.961998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:17:40.962042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:17:40.962148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:17:40.974487Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:17:41.226317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:17:41.231882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:41.232198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:17:41.232422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:17:41.232481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:41.241261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:17:41.241456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:17:41.241733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:41.241802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:17:41.241874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:17:41.241911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:17:41.251357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:41.251436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:17:41.251486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:17:41.254279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:41.254342Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:41.254383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:17:41.254466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:17:41.259099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:17:41.261264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:17:41.261469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:17:41.262297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:17:41.262458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:17:41.262497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:17:41.262777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:17:41.262833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:17:41.263005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:17:41.263080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:17:41.264918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:17:41.264956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:17:41.265124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:17:41.265156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:17:41.265437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:41.265483Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:17:41.265586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:17:41.265620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:17:41.265659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:17:41.265693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:17:41.271908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all 
the parts is done, operation id: 1:0 2024-11-21T23:17:41.272010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:17:41.272178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:17:41.272252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:17:41.272302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:17:41.274627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:17:41.274772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:17:41.274819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:17:41.274890Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:17:41.274939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:17:41.275057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... ags: 2 } ExecLevel: 0 TxId: 175 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:17:44.000898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.000965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.000988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:17:44.001011Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2024-11-21T23:17:44.001034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:17:44.001594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.001657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.001681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:17:44.001714Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 
2024-11-21T23:17:44.001739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T23:17:44.001799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T23:17:44.005832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2024-11-21T23:17:44.005943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2024-11-21T23:17:44.006659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:17:44.006774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:17:44.006844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2024-11-21T23:17:44.006903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:17:44.006933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T23:17:44.007051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2024-11-21T23:17:44.007258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:17:44.007319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T23:17:44.008836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:17:44.009217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2024-11-21T23:17:44.013905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:17:44.013976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:17:44.014142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T23:17:44.014274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:17:44.014310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 
72057594046678944, txId: 175, path id: 1 2024-11-21T23:17:44.014425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2024-11-21T23:17:44.014536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T23:17:44.014586Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2024-11-21T23:17:44.014673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2024-11-21T23:17:44.014711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T23:17:44.014808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2024-11-21T23:17:44.014855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T23:17:44.014886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2024-11-21T23:17:44.014927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2024-11-21T23:17:44.015011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T23:17:44.015046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2024-11-21T23:17:44.015076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 2024-11-21T23:17:44.015104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2024-11-21T23:17:44.016079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.016168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.016203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:17:44.016254Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2024-11-21T23:17:44.016905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:17:44.018140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.018241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.018283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:17:44.018325Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2024-11-21T23:17:44.018363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T23:17:44.018498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2024-11-21T23:17:44.019739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:17:44.019793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T23:17:44.019881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2024-11-21T23:17:44.020308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:17:44.020351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T23:17:44.020413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:17:44.022803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:17:44.026516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:17:44.026630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:17:44.026684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2024-11-21T23:17:44.027596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2024-11-21T23:17:44.027630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2024-11-21T23:17:44.028607Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2024-11-21T23:17:44.028692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2024-11-21T23:17:44.028717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2463:4455] TestWaitNotification: OK eventTxId 175 >> TGcTest::ShouldRemovePreviousCheckpoints [FAIL] >> TGcTest::ShouldIgnoreIncrementCheckpoint >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup >> TGcTest::ShouldIgnoreIncrementCheckpoint [FAIL] >> TStateStorageTest::ShouldCountStates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:17:42.425393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:17:42.425489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:17:42.425535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:17:42.425582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:17:42.425624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:17:42.425648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:17:42.425702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:17:42.426026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:17:42.517619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:17:42.517681Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:42.547519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:17:42.551505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:17:42.551698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:17:42.558629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:17:42.559339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:17:42.560009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:17:42.560324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:17:42.565102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:17:42.566408Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:17:42.566479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:17:42.566702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:17:42.566760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:17:42.566800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:17:42.566896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:17:42.573483Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:17:42.696480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:17:42.696740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:42.696990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:17:42.697179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:17:42.697235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:42.707401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:17:42.707606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:17:42.707817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:42.707881Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:17:42.707935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:17:42.707977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:17:42.710296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:42.710442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:17:42.710479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:17:42.712531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:42.712583Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:42.712633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:17:42.712689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:17:42.716374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:17:42.718385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 
1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:17:42.718619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:17:42.719539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:17:42.719683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:17:42.719730Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:17:42.720040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:17:42.720108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:17:42.720272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:17:42.720356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:17:42.722383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:17:42.722446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:17:42.722603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:17:42.722640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:17:42.722970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:17:42.723013Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:17:42.723108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:17:42.723145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:17:42.723192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:17:42.723234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:17:42.723282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:17:42.723315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:17:42.723400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:17:42.723444Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:17:42.723491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:17:42.725256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:17:42.725363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:17:42.725400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:17:42.725454Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:17:42.725495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:17:42.725603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.791550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.791573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:17:44.791600Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2024-11-21T23:17:44.791645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:17:44.792416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.792487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.792510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:17:44.792537Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2024-11-21T23:17:44.792571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T23:17:44.792641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T23:17:44.794858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2024-11-21T23:17:44.794959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 
175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2024-11-21T23:17:44.795449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:17:44.795561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:17:44.795600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2024-11-21T23:17:44.795668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:17:44.795693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T23:17:44.795724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 134 2024-11-21T23:17:44.796565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:17:44.797353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:17:44.798664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T23:17:44.798711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:17:44.798816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2024-11-21T23:17:44.798974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:17:44.799026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 FAKE_COORDINATOR: Erasing txId 175 2024-11-21T23:17:44.800800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:17:44.800845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:17:44.800964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T23:17:44.801066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:17:44.801095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2024-11-21T23:17:44.801139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 
2024-11-21T23:17:44.801391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T23:17:44.801431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2024-11-21T23:17:44.801467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2024-11-21T23:17:44.802178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.802252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.802276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:17:44.802305Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2024-11-21T23:17:44.802332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:17:44.803159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.803227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:17:44.803250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:17:44.803276Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2024-11-21T23:17:44.803302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T23:17:44.803359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T23:17:44.806600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T23:17:44.806657Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2024-11-21T23:17:44.806724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2024-11-21T23:17:44.806750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T23:17:44.806793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2024-11-21T23:17:44.806823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T23:17:44.806851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2024-11-21T23:17:44.806874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2024-11-21T23:17:44.806924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T23:17:44.807340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:17:44.807377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T23:17:44.807421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2024-11-21T23:17:44.808055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:17:44.808089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T23:17:44.808137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:17:44.808347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:17:44.808698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:17:44.810628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:17:44.810709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2024-11-21T23:17:44.811883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2024-11-21T23:17:44.811918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2024-11-21T23:17:44.813176Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2024-11-21T23:17:44.813292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2024-11-21T23:17:44.813320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2617:4609] TestWaitNotification: OK eventTxId 175 >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows >> KqpFlipJoin::RightSemi_2 [GOOD] >> KqpFlipJoin::RightSemi_3 >> TStateStorageTest::ShouldCountStates [FAIL] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint >> KqpQuery::RowsLimitServiceOverride >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [FAIL] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin-ColumnStore >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> 
KqpJoin::RightSemiJoin_SimpleKey [GOOD] >> KqpJoin::RightSemiJoin_SecondaryIndex >> KqpJoinOrder::TPCH5-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore >> KqpLimits::QSReplySizeEnsureMemoryLimits >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> TestYmqHttpProxy::TestDeleteMessage >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull >> TestKinesisHttpProxy::TestWrongRequest >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> TestYmqHttpProxy::TestListQueues >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MergeConnection >> KqpJoinOrder::TPCH3-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH3+StreamLookupJoin-ColumnStore >> TestKinesisHttpProxy::TestListStreamConsumers >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] >> KqpIndexLookupJoin::Left+StreamLookup >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] >> SystemView::PartitionStatsTtlFields [GOOD] >> SystemView::PartitionStatsFields >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectWhereInSubquery >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [FAIL] Test command err: 2024-11-21T23:17:01.112526Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:35:2082] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2024-11-21T23:17:01.658522Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2024-11-21T23:17:02.738705Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp:197, auto NFq::NTestSuiteTGcTest::TTestCaseShouldRemovePreviousCheckpoints::Execute_(NUnitTest::TTestContext &)::(anonymous class)::operator()() const: (getResult.second.Empty()) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) ??+0 (0xF9DBB4C) std::__y1::__function::__func(std::__y1::function, IRetryPolicy::TPtr const&, bool, std::__y1::function, std::__y1::function)::'lambda'(), std::__y1::allocator(std::__y1::function, IRetryPolicy::TPtr const&, bool, std::__y1::function, std::__y1::function)::'lambda'()>, void* ()>::operator()()+96 (0xF9DACF0) TMaybe 
DoWithRetry(std::__y1::function, IRetryPolicy::TPtr const&, bool, std::__y1::function, std::__y1::function)+322 (0xF9DA782) bool DoWithRetry(std::__y1::function, IRetryPolicy::TPtr const&, bool, std::__y1::function, std::__y1::function)+612 (0xF9DA1E4) bool DoWithRetry(std::__y1::function, TRetryOptions, bool)+1003 (0xF9C508B) NFq::NTestSuiteTGcTest::TTestCaseShouldRemovePreviousCheckpoints::Execute_(NUnitTest::TTestContext&)+3935 (0xF9B697F) std::__y1::__function::__func, void ()>::operator()()+280 (0xF9CADD8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTGcTest::TCurrentTest::Execute()+1204 (0xF9C9B84) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7F62768CDD90) __libc_start_main+128 (0x7F62768CDE40) _start+41 (0xDAB6029) ... assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp:94, NFq::(anonymous namespace)::TTestRuntime::TTestRuntime(const char *): (issues.Empty())
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:16581: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:16581 NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) ??+0 (0xF9BEA2B) NFq::NTestSuiteTGcTest::TTestCaseShouldIgnoreIncrementCheckpoint::Execute_(NUnitTest::TTestContext&)+422 (0xF9C67F6) std::__y1::__function::__func, void ()>::operator()()+280 (0xF9CADD8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTGcTest::TCurrentTest::Execute()+1204 (0xF9C9B84) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7F62768CDD90) __libc_start_main+128 (0x7F62768CDE40) _start+41 (0xDAB6029) assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp:215, virtual void NFq::NTestSuiteTStateStorageTest::TTestCaseShouldCountStates::Execute_(NUnitTest::TTestContext &): (issues.Empty()) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) NFq::NTestSuiteTStateStorageTest::TTestCaseShouldCountStates::Execute_(NUnitTest::TTestContext&)+3258 (0xFA195EA) std::__y1::__function::__func, void ()>::operator()()+280 (0xFA52938) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTStateStorageTest::TCurrentTest::Execute()+1204 (0xFA51B04) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7F62768CDD90) __libc_start_main+128 (0x7F62768CDE40) _start+41 (0xDAB6029) assertion failed at ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp:224, virtual void NFq::NTestSuiteTStateStorageTest::TTestCaseShouldCountStatesNonExistentCheckpoint::Execute_(NUnitTest::TTestContext &): (issues.Empty()) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x102FBE70) NFq::NTestSuiteTStateStorageTest::TTestCaseShouldCountStatesNonExistentCheckpoint::Execute_(NUnitTest::TTestContext&)+1481 (0xFA1BC09) std::__y1::__function::__func, void ()>::operator()()+280 (0xFA52938) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x103317A9) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x103029D9) NFq::NTestSuiteTStateStorageTest::TCurrentTest::Execute()+1204 (0xFA51B04) NUnitTest::TTestFactory::Execute()+2438 (0x103042A6) NUnitTest::RunMain(int, char**)+5149 (0x1032B3ED) ??+0 (0x7F62768CDD90) __libc_start_main+128 (0x7F62768CDE40) _start+41 (0xDAB6029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2024-11-21T23:17:30.949144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875712062296378:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:30.949189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00236c/r3tmp/tmploCoeB/pdisk_1.dat 2024-11-21T23:17:31.357073Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:31.357180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:31.409525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:31.453249Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24248 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T23:17:31.672528Z node 1 :TX_PROXY DEBUG: actor# [1:7439875712062296604:2137] Handle TEvNavigate describe path dc-1 2024-11-21T23:17:31.672587Z node 1 :TX_PROXY DEBUG: Actor# [1:7439875716357264323:2428] HANDLE EvNavigateScheme dc-1 2024-11-21T23:17:31.672707Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439875716357263940:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:31.672742Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439875716357263940:2152], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:17:31.672973Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:17:31.674959Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296273:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875716357264328:2429] 2024-11-21T23:17:31.675070Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875712062296273:2051] Subscribe: subscriber# [1:7439875716357264328:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:31.675148Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296279:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875716357264330:2429] 2024-11-21T23:17:31.675168Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875712062296279:2057] Subscribe: subscriber# [1:7439875716357264330:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:31.675219Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264328:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875712062296273:2051] 2024-11-21T23:17:31.675240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264330:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875712062296279:2057] 2024-11-21T23:17:31.675294Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296276:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439875716357264329:2429] 2024-11-21T23:17:31.675296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875716357264325:2429] 2024-11-21T23:17:31.675320Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439875712062296276:2054] Subscribe: subscriber# 
[1:7439875716357264329:2429], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:17:31.675347Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875716357264327:2429] 2024-11-21T23:17:31.675380Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296273:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875716357264328:2429] 2024-11-21T23:17:31.675395Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296279:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875716357264330:2429] 2024-11-21T23:17:31.675402Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439875716357264324:2429][/dc-1] Set up state: owner# [1:7439875716357263940:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:31.675544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264329:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875712062296276:2054] 2024-11-21T23:17:31.675606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264328:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875716357264325:2429], cookie# 1 2024-11-21T23:17:31.675621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264329:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875716357264326:2429], cookie# 1 2024-11-21T23:17:31.675658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264330:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875716357264327:2429], cookie# 1 2024-11-21T23:17:31.675685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439875716357264326:2429] 2024-11-21T23:17:31.675763Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439875716357264324:2429][/dc-1] Path was already updated: owner# [1:7439875716357263940:2152], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:17:31.675788Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296276:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439875716357264329:2429] 2024-11-21T23:17:31.675809Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296276:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875716357264329:2429], cookie# 1 2024-11-21T23:17:31.675830Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296273:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875716357264328:2429], cookie# 1 2024-11-21T23:17:31.675853Z node 1 
:SCHEME_BOARD_REPLICA DEBUG: [1:7439875712062296279:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439875716357264330:2429], cookie# 1 2024-11-21T23:17:31.675885Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264329:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875712062296276:2054], cookie# 1 2024-11-21T23:17:31.675921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264328:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875712062296273:2051], cookie# 1 2024-11-21T23:17:31.675941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439875716357264330:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875712062296279:2057], cookie# 1 2024-11-21T23:17:31.675972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875716357264326:2429], cookie# 1 2024-11-21T23:17:31.676007Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:17:31.676023Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875716357264325:2429], cookie# 1 2024-11-21T23:17:31.676039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:17:31.676059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439875716357264327:2429], cookie# 1 2024-11-21T23:17:31.676072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439875716357264324:2429][/dc-1] Unexpected sync response: sender# [1:7439875716357264327:2429], cookie# 1 2024-11-21T23:17:31.730368Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439875716357263940:2152], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:17:31.730792Z 
node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439875716357263940:2152], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:50.182702Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875796797274719:2346], recipient# [2:7439875796797274716:2537], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:50.182737Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875796797274720:2347], recipient# [2:7439875796797274715:2536], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:50.205053Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439875719097344825:2223], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:50.205347Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875719097344825:2223], cacheItem# { Subscriber: { Subscriber: [3:7439875731982246840:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:50.205439Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875796406771682:8088], recipient# [3:7439875796406771681:4475], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:50.430835Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7439875719097344825:2223], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:50.430956Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7439875719097344825:2223], cacheItem# { Subscriber: { Subscriber: [3:7439875731982246839:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:50.431036Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7439875796406771684:8089], recipient# [3:7439875796406771683:4476], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:51.173111Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875762437536069:2223], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:51.173256Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875762437536069:2223], cacheItem# { Subscriber: { Subscriber: [2:7439875792502307278:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:51.173346Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875801092242019:2348], recipient# [2:7439875801092242018:2538], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:51.184804Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875762437536069:2223], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:51.184946Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875762437536069:2223], cacheItem# { Subscriber: { Subscriber: [2:7439875792502307279:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:51.185026Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875762437536069:2223], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:51.185096Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875762437536069:2223], cacheItem# { Subscriber: { Subscriber: [2:7439875792502307278:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:51.185145Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439875762437536069:2223], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:51.185204Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439875762437536069:2223], cacheItem# { Subscriber: { Subscriber: [2:7439875792502307278:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2024-11-21T23:17:51.185267Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875801092242023:2349], recipient# [2:7439875801092242020:2539], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:51.185335Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875801092242024:2350], recipient# [2:7439875801092242021:2540], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:17:51.185379Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439875801092242025:2351], recipient# [2:7439875801092242022:2541], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::DefaultParameterValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:111:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:113:2058] recipient: [1:107:2139] Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:123:2058] recipient: [1:105:2137] Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:129:2058] recipient: [1:106:2138] Leader for TabletID 72057594046316545 is [1:130:2153] sender: [1:133:2058] recipient: [1:107:2139] 2024-11-21T23:09:00.223713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:09:00.223818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2024-11-21T23:09:00.223857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:09:00.223892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:09:00.223963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:09:00.223998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:09:00.224094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:09:00.224468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:09:00.331269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:09:00.331345Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:127:2151] sender: [1:168:2058] recipient: [1:15:2062] 2024-11-21T23:09:00.342982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:09:00.343463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:09:00.343674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:09:00.362988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:09:00.363443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:09:00.364173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:00.365724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:00.369294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:00.370708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:00.370772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:00.370854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:09:00.370904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:00.370942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:09:00.371243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:209:2058] recipient: [1:207:2209] Leader for TabletID 72057594037968897 is [1:213:2213] sender: [1:214:2058] recipient: [1:207:2209] 2024-11-21T23:09:00.378455Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:122:2148] sender: [1:234:2058] recipient: [1:15:2062] 2024-11-21T23:09:00.550047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { 
Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:09:00.550270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:00.550485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:09:00.550739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:09:00.550798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:00.559420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:00.559582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:09:00.559873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:00.559965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:09:00.560025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:09:00.560061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:09:00.564073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:00.564151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:09:00.564191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:09:00.572325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:00.572401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:00.572464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:00.572513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:09:00.577017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:09:00.579464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:09:00.579700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:130:2153] sender: 
[1:249:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:09:00.580751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:09:00.580897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 4294969449 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:09:00.580947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:00.581224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:09:00.581296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:09:00.581476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:09:00.581576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:09:00.583975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:09:00.584058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:09:00.584259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:09:00.584319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:201:2204], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:09:00.584747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:09:00.584801Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:09:00.584915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:09:00.584960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:00.585015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:09:00.585087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:09:00.585129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:09:00.585163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:09:00.585227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:09:00.585274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:09:00.585312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [Own ... 
Queue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:17:50.601065Z node 176 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:17:50.601359Z node 176 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 338us result status StatusSuccess 2024-11-21T23:17:50.602297Z node 176 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 
DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:17:50.613754Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:17:50.613853Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2024-11-21T23:17:50.613930Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T23:17:50.614004Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2024-11-21T23:17:50.614124Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1732231070579999 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1732231070579999 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:17:50.614764Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { 
Records [{ Order: 3 Group: 1732231070579999 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2024-11-21T23:17:50.621399Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][176:1008:2788] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2024-11-21T23:17:50.621513Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2024-11-21T23:17:50.621765Z node 176 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][176:1009:2788] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2024-11-21T23:17:50.622092Z node 176 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][176:937:2788] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |86.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] >> KqpFlipJoin::RightSemi_3 [GOOD] >> KqpFlipJoin::RightOnly_1 >> KqpLimits::ComputeActorMemoryAllocationFailure [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService |86.4%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::QSReplySizeEnsureMemoryLimits [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TestKinesisHttpProxy::TestWrongRequest [GOOD] >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibility >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2024-11-21T23:15:32.935647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:32.940585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:32.940791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002b49/r3tmp/tmpLwnUiP/pdisk_1.dat 2024-11-21T23:15:33.393943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.443090Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:33.493760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:33.493922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:33.505726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:33.626966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.668773Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:33.669959Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:33.670458Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:33.670805Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:33.713559Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:33.714431Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:33.714578Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:33.716333Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:33.716420Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:33.716486Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:33.716843Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:33.753350Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:33.753585Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:33.753679Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:33.753716Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:33.753755Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
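
The TRACE records above walk scheme transaction 281474976715657 through the datashard's execution units in order (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, then PlanQueue once the plan step arrives). As a reading aid only — this is not part of the test output — the following minimal Python sketch recovers that per-operation unit order from a saved log; the regex assumes nothing beyond the exact "Trying to execute [...] at ... on unit ..." wording quoted here.

    # Sketch: list the execution units each operation passes through,
    # based on the "Trying to execute [<op>] at <tablet> on unit <Unit>"
    # TRACE lines in a captured datashard log.
    import re
    import sys
    from collections import OrderedDict

    UNIT_RE = re.compile(r"Trying to execute \[([^\]]+)\] at (\d+) on unit (\w+)")

    def unit_pipeline(log_text):
        """Return {(op_id, tablet): [unit, ...]} in observed order."""
        pipeline = OrderedDict()
        for op_id, tablet, unit in UNIT_RE.findall(log_text):
            pipeline.setdefault((op_id, tablet), []).append(unit)
        return pipeline

    if __name__ == "__main__":
        for (op, tablet), units in unit_pipeline(sys.stdin.read()).items():
            print(f"{op} @ {tablet}: {' -> '.join(units)}")

Run against the surrounding trace, this prints one line per operation, e.g. the CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan chain seen above.
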
2024-11-21T23:15:33.753788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:33.754246Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.754314Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.754875Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:33.754965Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:33.755082Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.755115Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.755168Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:33.755221Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:33.755277Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:33.755327Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:33.755367Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:33.755402Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:33.755433Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:33.755496Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:33.755658Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:33.755703Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:33.755828Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:33.756110Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:33.756158Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:33.756283Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:33.756349Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:33.756388Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:33.756424Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:33.756475Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:33.756715Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:33.756755Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:33.756787Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:33.756816Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:33.756871Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:33.756899Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:33.756938Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:33.756995Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:33.757022Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:33.758444Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:33.758508Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:33.769685Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:33.769786Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:33.769875Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:33.769928Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:33.770006Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:33.966113Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.966192Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:33.966230Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:33.966333Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:33.966364Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:33.966500Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:33.966569Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:33.966614Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:33.966646Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
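
The only timing figures in this trace are the datashard's own summaries, e.g. "Propose transaction complete txid 281474976715657 at tablet 72075186224037888 ... exec latency: 0 ms, propose latency: 0 ms, status: PREPARED". A minimal sketch for collecting those numbers from a saved log follows; it is an illustration only and assumes just the wording visible in these lines.

    # Sketch: collect the "exec latency: X ms, propose latency: Y ms"
    # pairs reported by the datashard and print a small summary.
    import re
    import sys

    LAT_RE = re.compile(r"exec latency: (\d+) ms, propose latency: (\d+) ms")

    def latency_pairs(log_text):
        """Yield (exec_ms, propose_ms) tuples in the order they appear."""
        for exec_ms, propose_ms in LAT_RE.findall(log_text):
            yield int(exec_ms), int(propose_ms)

    if __name__ == "__main__":
        pairs = list(latency_pairs(sys.stdin.read()))
        if pairs:
            worst = max(pairs, key=lambda p: p[0] + p[1])
            print(f"{len(pairs)} completions, worst (exec, propose) ms = {worst}")
        else:
            print("no latency records found")
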
2024-11-21T23:15:33.971262Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:33.971335Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:33.971803Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.971840Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:33.971887Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:33.971954Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:33.971994Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:33.972036Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... 037889 2024-11-21T23:17:53.706668Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2024-11-21T23:17:53.706704Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2024-11-21T23:17:53.706742Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:53.706791Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2024-11-21T23:17:53.706822Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2024-11-21T23:17:53.706854Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2024-11-21T23:17:53.707026Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2024-11-21T23:17:53.707081Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:53.707110Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2024-11-21T23:17:53.707140Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:53.707168Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2024-11-21T23:17:53.707213Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2024-11-21T23:17:53.707257Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2024-11-21T23:17:53.707303Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2024-11-21T23:17:53.707351Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:53.707376Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:53.707401Z node 15 
:TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2024-11-21T23:17:53.707431Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2024-11-21T23:17:53.707577Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2024-11-21T23:17:53.707611Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2024-11-21T23:17:53.707656Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2024-11-21T23:17:53.707695Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2024-11-21T23:17:53.707722Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:53.707749Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2024-11-21T23:17:53.707778Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2024-11-21T23:17:53.707807Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:53.707963Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2024-11-21T23:17:53.707998Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2024-11-21T23:17:53.708041Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:17:53.708082Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:17:53.708120Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2024-11-21T23:17:53.708146Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:17:53.708175Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2024-11-21T23:17:53.708215Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:53.708253Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2024-11-21T23:17:53.708296Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2024-11-21T23:17:53.708335Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2024-11-21T23:17:53.731029Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3000 txid# 281474976715664} 2024-11-21T23:17:53.731170Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2024-11-21T23:17:53.731323Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:17:53.731415Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2024-11-21T23:17:53.731554Z node 15 
:TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1000:2801], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:53.731659Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:17:53.732092Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037889 step# 3000 txid# 281474976715664} 2024-11-21T23:17:53.732139Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2024-11-21T23:17:53.732190Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2024-11-21T23:17:53.732226Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2024-11-21T23:17:53.732277Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1000:2801], exec latency: 0 ms, propose latency: 0 ms 2024-11-21T23:17:53.732328Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2024-11-21T23:17:53.734279Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:558:2485], Recipient [15:632:2537]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2024-11-21T23:17:53.737646Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:17:53.737851Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2024-11-21T23:17:53.738080Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:53.738164Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:17:53.738253Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:17:53.738318Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:17:53.738375Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2024-11-21T23:17:53.738499Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:53.738547Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:17:53.738584Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:17:53.738625Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:17:53.738832Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2024-11-21T23:17:53.738916Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2024-11-21T23:17:53.739226Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
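
In the read that follows, the TEvRead request names an explicit snapshot (Snapshot { Step: 3000 TxId: 281474976715664 }) and the shard reports promoting its UnprotectedReadEdge to the same version, written v3000/281474976715664. The sketch below cross-checks those two spellings of a version in a saved log; treating them as comparable (step, txId) pairs is an assumption made for this illustration, based only on the lines quoted here.

    # Sketch: compare the snapshots requested in TEvRead
    # ("Snapshot { Step: S TxId: T }") with the read edges the shard
    # reports promoting ("promoting UnprotectedReadEdge to vS/T").
    import re
    import sys

    SNAP_RE = re.compile(r"Snapshot \{ Step: (\d+) TxId: (\d+) \}")
    EDGE_RE = re.compile(r"promoting UnprotectedReadEdge to v(\d+)/(\d+)")

    def versions(log_text):
        snaps = {tuple(map(int, m)) for m in SNAP_RE.findall(log_text)}
        edges = {tuple(map(int, m)) for m in EDGE_RE.findall(log_text)}
        return snaps, edges

    if __name__ == "__main__":
        snaps, edges = versions(sys.stdin.read())
        # In this particular trace every promoted edge equals a requested
        # snapshot; report any that do not, rather than asserting a rule.
        for extra in sorted(edges - snaps):
            print("edge without matching snapshot request:", extra)
        print(f"{len(snaps)} snapshot(s), {len(edges)} promoted edge(s)")
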
2024-11-21T23:17:53.739267Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:17:53.739301Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:17:53.739337Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:17:53.739407Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2024-11-21T23:17:53.739434Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:17:53.739473Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2024-11-21T23:17:53.739558Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:17:53.739741Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2024-11-21T23:17:53.741091Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [15:1021:2820], Recipient [15:632:2537]: NKikimr::TEvDataShard::TEvReadScanStarted 2024-11-21T23:17:53.741329Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [15:1021:2820], Recipient [15:632:2537]: NKikimr::TEvDataShard::TEvReadScanFinished 2024-11-21T23:17:53.741672Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:632:2537], Recipient [15:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:53.741728Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:17:53.741840Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:17:53.741913Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:17:53.741975Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:17:53.742037Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:17:53.742096Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:17:53.742172Z node 15 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:17:53.742262Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 >> TestKinesisHttpProxy::ListShardsTimestamp >> TestYmqHttpProxy::TestDeleteQueue >> TPersQueueMirrorer::ValidStartStream [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+StreamLookupJoin-ColumnStore >> TestKinesisHttpProxy::BadRequestUnknownMethod >> TestYmqHttpProxy::TestListQueues [GOOD] >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] >> KqpJoin::RightTableKeyPredicate >> TestKinesisHttpProxy::TestCounters >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> TestYmqHttpProxy::TestPurgeQueue >> KqpLimits::QueryReplySize >> Sharding::XXUsage >> 
Sharding::XXUsage [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown >> KqpExplain::ExplainScanQueryWithParams >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::SimpleFeatureFlags >> KqpIndexLookupJoin::Left+StreamLookup [GOOD] >> KqpIndexLookupJoin::Left-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 10629758540673412549 17077929650696302699 7572266159848487531 16281981579995368438 14662984523678101355 7611155428864633112 2740776975338183791 12474582993366765337 12867179712731803663 14925152027531771612 16991577290226695422 16709955532790690806 18379467501372554995 10039126090914372652 3148937572694131571 11652072822162968543 8177636937396152483 11778500171244642819 1924726586151680575 3941811452796934397 2025453705032793558 15375770444168661483 3995551645963388360 15879328586226048028 2078254102501529744 1487734054420723762 10528242836105148686 10876868815373921857 913795701343263412 14871291592697022665 8682658969876241481 7623545974813612023 408577138980861130 13525207896615445234 17808975456581441464 14283506668755557350 9003225391177281285 9350692267840824991 4970849055414147296 16701774491882796213 2284541518523215397 8987803015804787471 925488762105678417 12157484983096813291 16282356641205816811 9674102133560432687 4194151015924969227 14862379768202830880 2727517966903258955 15816981008354015183 13045847802158802531 16442114259971083231 5170156644703598866 1750867554719666641 15492676836545305776 16333813863405218003 16731591051553170784 5655362025916876835 14680485028545211529 9954986864188203660 12011190449538640188 17502679322797796025 17362797109916055980 1603269068938198866 14634572702183088049 5327163267363636175 245981415740286065 4395136423129980678 9788564372161456992 10688251522666770492 17722124227392484733 9200236595896414276 1036131444637023229 6409956095184467206 4669668556313900572 2914627249142549098 9352831760596582627 10754707581229484424 1297510378808225864 4801543068640463097 7185626633554579665 17268182949718823675 9600079810726459468 997718318311732054 13371284433953763377 1405925293199523154 5878482583093083221 13419069660644453488 17604445050170677515 12765948188552788958 >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::SelectCountAsteriskFromVar |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |86.4%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |86.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService |86.4%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> KqpParams::ImplicitParameterTypes >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService [GOOD] >> KqpLimits::CancelAfterRwTx >> KqpLimits::OutOfSpaceBulkUpsertFail >> SystemView::PartitionStatsFields [GOOD] >> KqpJoinOrder::TPCH3+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH3-StreamLookupJoin+ColumnStore >> KqpExplain::IdxFullscan [GOOD] |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> KqpFlipJoin::RightOnly_1 [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> KqpFlipJoin::RightOnly_2 >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> TSchemeShardTest::NestedDirs >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> TestKinesisHttpProxy::ListShardsToken >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> KqpExplain::ExplainDataQuery >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TestKinesisHttpProxy::TestCounters [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch >> KqpJoin::RightTableKeyPredicate [GOOD] >> KqpJoin::RightTableIndexPredicate >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter |86.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2024-11-21T23:17:18.992132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875658665319775:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:18.993592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024bd/r3tmp/tmpYD2yto/pdisk_1.dat 2024-11-21T23:17:19.432235Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:19.439739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:19.439809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:19.442529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26372, node 1 2024-11-21T23:17:19.516432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:19.516452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:19.516462Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:19.516583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
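
Interleaved with the build percentages, this output records test results inline as ">> Suite::Test [STATUS]" markers (mostly [GOOD] here). A small sketch that tallies those markers from a saved log is shown below; the marker shape is taken from this output, statuses other than GOOD are only assumed to follow the same pattern, and ">> Name" entries without a status are ignored.

    # Sketch: tally the inline ">> Suite::Test [STATUS]" result markers
    # that appear between the build progress lines.
    import re
    import sys
    from collections import Counter

    MARKER_RE = re.compile(r">> (\S+::\S+) \[([A-Z]+)\]")

    def tally(log_text):
        """Count result markers by status (e.g. GOOD)."""
        return Counter(status for _name, status in MARKER_RE.findall(log_text))

    if __name__ == "__main__":
        for status, count in tally(sys.stdin.read()).most_common():
            print(f"{status}: {count}")
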
2024-11-21T23:17:19.814425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.820427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.820482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.821102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:19.821310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:19.821329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:19.821893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:19.821904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:19.822196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:19.824806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231039868, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:19.824844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:19.825080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:19.825526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:19.825810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.825973Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.826023Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:19.826095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:19.826167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:19.826215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:17:19.828671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:19.828739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:19.828754Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:19.828844Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:17698 2024-11-21T23:17:20.037863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.038058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.038076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.059297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:20.059482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:20.070749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:20.077925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040113, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.077962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231040113, at schemeshard: 72057594046644480 2024-11-21T23:17:20.078213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:20.078289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:20.078319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 1 2024-11-21T23:17:20.078858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.079017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.082005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:20.082073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:20.082090Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:20.082190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:17:20.091060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.091333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.091351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.091416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:20.091512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:20.091542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:20.092101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:20.092226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.092440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.093181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:20.093221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:20.093235Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:20.093309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T23:17:20.095602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.095772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.096369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: C ... 
046644480 2024-11-21T23:18:03.673100Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:03.673218Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715699:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:18:03.673600Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715699, at schemeshard: 72057594046644480 2024-11-21T23:18:03.674157Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715699 2024-11-21T23:18:03.674200Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715699 2024-11-21T23:18:03.674221Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715699, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 5 2024-11-21T23:18:03.674580Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715699 2024-11-21T23:18:03.674615Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715699 2024-11-21T23:18:03.674633Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715699, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 3 2024-11-21T23:18:03.675712Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231083723, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:18:03.675760Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715699:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732231083723, at schemeshard: 72057594046644480 2024-11-21T23:18:03.675896Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715699:0 128 -> 240 2024-11-21T23:18:03.676528Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:03.676731Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:03.676777Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715699:0 ProgressState 2024-11-21T23:18:03.676861Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715699:0 progress is 1/1 2024-11-21T23:18:03.676902Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715699:0 2024-11-21T23:18:03.676944Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715699, publications: 2, subscribers: 1 2024-11-21T23:18:03.678353Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715699 2024-11-21T23:18:03.678928Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715699 2024-11-21T23:18:03.678963Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715699, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 6 2024-11-21T23:18:03.679172Z node 6 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715699 2024-11-21T23:18:03.679196Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715699 2024-11-21T23:18:03.679210Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715699, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 4 2024-11-21T23:18:03.679253Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715699, subscribers: 1 2024-11-21T23:18:03.683242Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000101v0/v2, operationId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.683540Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715700:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:18:03.684332Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715700, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000101v0/v2 2024-11-21T23:18:03.684468Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:03.684676Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:03.684728Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715700:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:18:03.686377Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T23:18:03.686449Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T23:18:03.686472Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 5 2024-11-21T23:18:03.686686Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 30 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T23:18:03.686708Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T23:18:03.686721Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 30], version: 3 2024-11-21T23:18:03.688103Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231083737, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:18:03.688144Z node 6 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715700:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732231083737, at schemeshard: 72057594046644480 2024-11-21T23:18:03.688265Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715700:0 128 -> 240 2024-11-21T23:18:03.688662Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715700, at 
schemeshard: 72057594046644480 2024-11-21T23:18:03.688893Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:03.689083Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:03.689130Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715700:0 ProgressState 2024-11-21T23:18:03.689217Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715700:0 progress is 1/1 2024-11-21T23:18:03.689259Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715700:0 2024-11-21T23:18:03.689301Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715700, publications: 2, subscribers: 1 2024-11-21T23:18:03.689997Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 29 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T23:18:03.690032Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T23:18:03.690048Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 29], version: 6 2024-11-21T23:18:03.690235Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 30 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715700 2024-11-21T23:18:03.690257Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715700 2024-11-21T23:18:03.690268Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715700, pathId: [OwnerId: 72057594046644480, LocalPathId: 30], version: 4 2024-11-21T23:18:03.690314Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715700, subscribers: 1 2024-11-21T23:18:03.724789Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439875853943412411:2438]: Pool not found 2024-11-21T23:18:03.810072Z node 6 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [709562bf-a4513268-cb76e9e2-b984f721] Got succesfult GRPC response. 
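
The HTTP_PROXY records around this point trace each proxy call (CreateQueue, then SendMessageBatch) from "got new request" through the cloud-auth lookup and the internal gRPC call to the final 200 reply, all tagged with the same requestId. Purely as an illustration of reading such traces — not part of the test — the sketch below groups those records by requestId; it matches only the line shape quoted in this output.

    # Sketch: group HTTP_PROXY records by their "requestId [...]" tag and
    # count how many log records describe each request.
    import re
    import sys

    REQ_RE = re.compile(r"http request \[(\w+)\] requestId \[([0-9a-f-]+)\]")

    def per_request(log_text):
        """Map requestId -> (method, number of proxy records mentioning it)."""
        seen = {}
        for method, req_id in REQ_RE.findall(log_text):
            _m, n = seen.get(req_id, (method, 0))
            seen[req_id] = (method, n + 1)
        return seen

    if __name__ == "__main__":
        for req_id, (method, n) in per_request(sys.stdin.read()).items():
            print(f"{method} {req_id}: {n} records")
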
2024-11-21T23:18:03.810166Z node 6 :HTTP_PROXY INFO: http request [CreateQueue] requestId [709562bf-a4513268-cb76e9e2-b984f721] reply ok 2024-11-21T23:18:03.810353Z node 6 :HTTP DEBUG: (#44,[::1]:33252) <- (200 ) 2024-11-21T23:18:03.810522Z node 6 :HTTP DEBUG: (#44,[::1]:33252) connection closed Http output full {"QueueUrl":"http://ghrun-uc25mmfz34.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName.fifo"} 2024-11-21T23:18:03.817297Z node 6 :HTTP DEBUG: (#44,[::1]:33260) incoming connection opened 2024-11-21T23:18:03.819735Z node 6 :HTTP DEBUG: (#44,[::1]:33260) -> (POST /Root) 2024-11-21T23:18:03.819893Z node 6 :HTTP_PROXY INFO: proxy service: incoming request from [b882:501:6050:0:a082:501:6050:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: 84755a2a-c60fa2e2-342ce187-96fdceda 2024-11-21T23:18:03.820866Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [84755a2a-c60fa2e2-342ce187-96fdceda] got new request from [b882:501:6050:0:a082:501:6050:0] 2024-11-21T23:18:03.827667Z node 6 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [84755a2a-c60fa2e2-342ce187-96fdceda] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:03.827711Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [84755a2a-c60fa2e2-342ce187-96fdceda] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:03.958060Z node 6 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [84755a2a-c60fa2e2-342ce187-96fdceda] Got succesfult GRPC response. 2024-11-21T23:18:03.958320Z node 6 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [84755a2a-c60fa2e2-342ce187-96fdceda] reply ok 2024-11-21T23:18:03.958612Z node 6 :HTTP DEBUG: (#44,[::1]:33260) <- (200 ) 2024-11-21T23:18:03.958695Z node 6 :HTTP DEBUG: (#44,[::1]:33260) connection closed Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"b83fc4eb-ef4a6749-30e94816-cd9a2ecd"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"babaa6b4-e762b71e-e33e5c32-7d639af5"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::PartitionStatsFields [GOOD] Test command err: 2024-11-21T23:15:48.613367Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875271698336698:2243];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:48.613453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000d4b/r3tmp/tmppwftWY/pdisk_1.dat 2024-11-21T23:15:49.518109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:49.518194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:49.539381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:49.583763Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 
28274, node 1 2024-11-21T23:15:49.883957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:49.883979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:49.883987Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:49.884084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:50.467299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:50.547582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:50.640739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:50.726106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:50.726194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:50.729385Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T23:15:50.730368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:50.729381Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875280538464186:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:50.771360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:50.771432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:50.786546Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T23:15:50.811213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:50.875307Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:51.236898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:51.293094Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875284800013110:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:51.293131Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:51.320390Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875284481258429:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:51.412167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:15:51.525236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:51.525343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:51.585807Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:15:51.613617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:51.670283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:51.670358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:51.730793Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:15:51.739351Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2024-11-21T23:15:51.916467Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:53.610525Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875271698336698:2243];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:53.610602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:55.596302Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439875280538464186:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:55.603600Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:56.290667Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875284481258429:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:56.290761Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:56.294741Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875284800013110:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:56.294823Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:15:57.925877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:15:58.578210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875314648010933:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:58.588258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875314648010921:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:58.588436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:58.601908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2024-11-21T23:15:58.683535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875314648010935:2325], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2024-11-21T23:15:59.464089Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8g5kfa2xdq986x3jjjbn32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2I0NmI3ODEtNzBjMzYyYTYtYzNmYTM2NzAtYWFiNDZhZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:15:59.547239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:16:00.415536Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jd8g5msg9r76zpkz1vhz8cjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2I0NmI3ODEtNzBjMzYyYTYtYzNmYTM2NzAtYWFiNDZhZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:00.518333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2024-11-21T23:16:01.124223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jd8g5nseaf7cvmjaghtas8cr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2I0NmI3ODEtNzBjMzYyYTYtYzNmYTM2NzAtYWFiNDZhZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:01.487615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01j ... ot set, use /Root 2024-11-21T23:17:45.501371Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710728. Ctx: { TraceId: 01jd8g8vpr8cbq86nhpnmb3hsp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=M2ZmZDgxMWEtZmYxZjAxYjMtNzAwM2JmMjYtM2EyOGU4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:46.705223Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710729. Ctx: { TraceId: 01jd8g8wwpeeyz4yb75j3ggfqz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=M2ZmZDgxMWEtZmYxZjAxYjMtNzAwM2JmMjYtM2EyOGU4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:48.011579Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710730. Ctx: { TraceId: 01jd8g8y2cettw2cthv0vwy9w6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=M2ZmZDgxMWEtZmYxZjAxYjMtNzAwM2JmMjYtM2EyOGU4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:49.235549Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710731. Ctx: { TraceId: 01jd8g8zbv3265r4g6ftpw0136, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=M2ZmZDgxMWEtZmYxZjAxYjMtNzAwM2JmMjYtM2EyOGU4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:50.401697Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976710733. Ctx: { TraceId: 01jd8g90h8fqewea3zwf7ffzhn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=YzQyNzhiNWMtOTkzMzBmNTktOGFlNWNmNWItMjIwOGE0OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:17:50.403663Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7439875795236932846:2957], owner: [11:7439875795236932842:2955], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:17:50.404493Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7439875795236932846:2957], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:17:50.404950Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7439875795236932846:2957], row count: 1, finished: 1 2024-11-21T23:17:50.404997Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7439875795236932846:2957], owner: [11:7439875795236932842:2955], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:17:50.409846Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231070400, txId: 281474976710732] shutting down 2024-11-21T23:17:52.962875Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7439875807462400225:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:52.962944Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000d4b/r3tmp/tmpCNg75n/pdisk_1.dat 2024-11-21T23:17:53.232693Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:53.290815Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:53.290960Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:53.299117Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3332, node 16 2024-11-21T23:17:53.397562Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:53.397597Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:53.397609Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:53.397818Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
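The SYSTEM_VIEWS "Scan started / Scan prepared / Scan finished" triplets in this block are reads of the partition_stats system view issued by the SystemView::PartitionStatsFields test through KQP (note the matching KQP_EXECUTER and KqpSnapshotManager entries). A minimal sketch of the same kind of read from outside the test, assuming the YDB Python SDK and the gRPC port printed just above (3332); the connection details and the bare SELECT are illustrative only, while the test itself asserts on specific fields:

import ydb

# Assumed connection parameters; the test runs against an in-process server.
driver = ydb.Driver(endpoint="grpc://localhost:3332", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def read_partition_stats(session):
    # .sys/partition_stats is the system view being scanned in the log entries above.
    return session.transaction(ydb.SerializableReadWrite()).execute(
        "SELECT * FROM `/Root/.sys/partition_stats`;",
        commit_tx=True,
    )

result_sets = pool.retry_operation_sync(read_partition_stats)
for row in result_sets[0].rows:
    print(row)

pool.stop()
driver.stop()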
2024-11-21T23:17:53.778046Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:53.807797Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:57.965663Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7439875807462400225:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:57.965731Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:58.277921Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439875833232204993:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:58.278047Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:58.279382Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7439875833232205005:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:58.284258Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:17:58.393323Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7439875833232205007:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:17:58.640481Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8g98c2bge8sc9sgcqvpep1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZDhhMGQ3ZGMtY2FiOGI5NDMtODdlYjNmOGMtMTA0YmJiOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:58.839934Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8g98ra8k73pf0jz5y8rqwg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZjUzZmQ1ZTgtNWMwZGE4NTAtMTI3Y2IwMTQtYjBmYjQ2OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:58.843201Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439875833232205150:2327], owner: [16:7439875833232205147:2325], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:17:58.844519Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439875833232205150:2327], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:17:58.844774Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439875833232205150:2327], row count: 1, finished: 1 2024-11-21T23:17:58.844806Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439875833232205150:2327], owner: [16:7439875833232205147:2325], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:17:58.848389Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231078834, txId: 281474976715662] shutting down 2024-11-21T23:18:00.008114Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8g99xs2n81h5svrnfqx3vj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=Y2ZiN2M0ZWQtOTU3NGVjOTctODMwMDMyZGQtMTNhMzMwMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:18:00.011074Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439875841822139802:2344], owner: [16:7439875841822139799:2342], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:18:00.023114Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439875841822139802:2344], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:18:00.023495Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439875841822139802:2344], row count: 1, finished: 1 2024-11-21T23:18:00.023545Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439875841822139802:2344], owner: [16:7439875841822139799:2342], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:18:00.026116Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231080006, txId: 281474976715664] shutting down 2024-11-21T23:18:00.327762Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd8g9a4c9p9qybsm3e5yh5rx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MTZhYjQ5NDAtYTY0YzQ0NzktMTliMGY5NGQtYTVhNjBjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2024-11-21T23:18:00.338537Z node 16 :SYSTEM_VIEWS INFO: Scan started, actor: [16:7439875841822139837:2354], owner: [16:7439875841822139833:2352], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:18:00.339554Z node 16 :SYSTEM_VIEWS INFO: Scan prepared, actor: [16:7439875841822139837:2354], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:18:00.339856Z node 16 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [16:7439875841822139837:2354], row count: 1, finished: 1 2024-11-21T23:18:00.339891Z node 16 :SYSTEM_VIEWS INFO: Scan finished, actor: [16:7439875841822139837:2354], owner: [16:7439875841822139833:2352], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2024-11-21T23:18:00.368071Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231080326, txId: 281474976715666] shutting down |86.4%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 10109, MsgBus: 9212 2024-11-21T23:17:43.217303Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875767771483490:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:43.217404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00292d/r3tmp/tmpZ9mMXB/pdisk_1.dat 2024-11-21T23:17:43.818111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:43.818199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:43.822533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10109, node 1 2024-11-21T23:17:43.867932Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:17:43.870043Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:17:43.956874Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:43.970879Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:43.970908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:43.970916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:43.978957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9212 TClient is connected to server localhost:9212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:44.623075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.657512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.779961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.936168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:45.030971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:46.866950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875780656387026:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:46.867090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:47.148301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.222410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.265190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.298600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.338429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.368691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.413810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875784951354821:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:47.413896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:47.414046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875784951354826:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:47.417467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:47.427659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875784951354828:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:48.216392Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875767771483490:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:48.216458Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"SUM(10,15)"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"SUM(10,15)"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 9030, MsgBus: 63044 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00292d/r3tmp/tmpoma3Av/pdisk_1.dat 2024-11-21T23:17:49.849903Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:49.861507Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:49.875298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:49.875541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:49.879247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9030, node 2 2024-11-21T23:17:49.945302Z node 2 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:49.945325Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:49.945332Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:49.945447Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63044 TClient is connected to server localhost:63044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion ... "Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 13935, MsgBus: 2299 2024-11-21T23:17:55.478521Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875818814662452:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00292d/r3tmp/tmpGEuVGd/pdisk_1.dat 2024-11-21T23:17:55.532750Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:17:55.624756Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:55.642156Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:55.642233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:55.646912Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13935, node 3 2024-11-21T23:17:55.709433Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:55.709456Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:55.709463Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:55.709578Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2299 TClient is connected to server localhost:2299 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:56.243924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:56.253280Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:17:56.264144Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:56.332044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:56.512032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:56.591422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:59.200141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875835994533177:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:59.200264Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:59.255056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:59.327542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:59.378137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:59.469744Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:59.560915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:59.606291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:59.672082Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875835994533682:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:59.672185Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:59.672550Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875835994533687:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:59.678836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:59.689191Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875835994533689:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:00.482761Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875818814662452:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:00.482830Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:01.000488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:01.335692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:18:01.384401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["id"],"Node Type":"TableLookupJoin","PlanNodeId":3,"Columns":["Value","complex_field","id","str_field"],"E-Rows":"No estimate","Table":"test_table_idx","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"},{"Operators":[{"E-Rows":"No estimate","Columns":["Value","complex_field","id","str_field"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["id"]}],"Node 
Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Nulls >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 20563, MsgBus: 3068 2024-11-21T23:17:20.029374Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875667661419968:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:20.029911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f7e/r3tmp/tmpbQlwyZ/pdisk_1.dat 2024-11-21T23:17:20.700705Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:20.706453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:20.706598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:20.710386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20563, node 1 2024-11-21T23:17:20.827448Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:20.827484Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:20.827492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:20.827591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3068 TClient is connected to server localhost:3068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:17:21.421609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:21.435282Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:17:21.446782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:21.549321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:21.733701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:17:21.819484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:17:23.487823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875680546323418:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:23.487960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:23.710631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:23.738531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:23.793366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:23.821553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:23.851000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:23.881057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:23.929168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875680546323911:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:23.929344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:23.929692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875680546323916:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:23.935946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:23.946204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875680546323918:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:17:24.983320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:25.017211Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875667661419968:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:25.017337Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:25.063077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31104, MsgBus: 24042 2024-11-21T23:17:26.795834Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875692644030033:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:26.795939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f7e/r3tmp/tmpGVdkN8/pdisk_1.dat 2024-11-21T23:17:26.935353Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:26.948052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:26.948150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:26.949560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31104, node 2 2024-11-21T23:17:27.027014Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:27.027103Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:27.027117Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:27.027247Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24042 TClient is connected to server localhost:24042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:17:27.444635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:27.454235Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:17:27.461684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:27.541934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:27.698345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... ice] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:53.237236Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:53.307970Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:53.352851Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:53.400941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:53.441200Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:53.491431Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:53.575838Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875810477633219:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:53.575957Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:53.576213Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875810477633224:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:53.580846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:53.593951Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439875810477633226:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:17:53.902586Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439875789002794551:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:53.902662Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:54.971071Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:55.125536Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7098, MsgBus: 27033 2024-11-21T23:17:57.165153Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439875828277493898:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:57.165233Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f7e/r3tmp/tmpJ9G5ZM/pdisk_1.dat 2024-11-21T23:17:57.324102Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:57.363198Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:57.363305Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:57.365752Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7098, node 6 2024-11-21T23:17:57.422322Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:57.422348Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:57.422358Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:57.422534Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27033 TClient is connected to server localhost:27033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:17:58.134482Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:58.146608Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:17:58.161295Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:58.271917Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:58.547976Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:58.650992Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:01.875314Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875845457364796:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:01.875411Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:01.950663Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.001548Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.046265Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.119917Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.171813Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439875828277493898:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:02.171882Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:02.196320Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.272085Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.386576Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875849752332594:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:18:02.386731Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:18:02.389958Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875849752332599:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:02.396528Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:02.415231Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439875849752332602:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:03.979167Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.108470Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> TestKinesisHttpProxy::GoodRequestCreateStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2024-11-21T23:17:18.957811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875660538533455:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:18.964786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024d3/r3tmp/tmpLkYMi2/pdisk_1.dat 2024-11-21T23:17:19.439648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:19.439823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:19.442171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:19.539507Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3219, node 1 2024-11-21T23:17:19.657686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:19.657712Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:19.657720Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:19.657838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:17:20.006576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.013007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.013078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.013757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:20.013937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:20.013951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:20.016016Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:20.016050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:20.016525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:20.018190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040064, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.018235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:20.018515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:20.019219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.019388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.019439Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:20.019551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:20.019592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:20.019629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:17:20.022804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:20.022873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:20.022912Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:20.022990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T23:17:20.023219Z node 
1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:18218 2024-11-21T23:17:20.275804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.275985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.276004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.276460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:20.276564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.277804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040323, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.277833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231040323, at schemeshard: 72057594046644480 2024-11-21T23:17:20.278060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:20.278118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:20.278140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:17:20.278838Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.278994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.280129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:20.280179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:20.280209Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:20.280279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 
2024-11-21T23:17:20.281954Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:20.283549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.283786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.283802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.283854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:20.283941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:20.283958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:20.284309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:20.284393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.284619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.285421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:20.285445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:20.285477Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:20.285521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T23:17:20.286988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.287166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.287739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIREC ... 
40 2024-11-21T23:18:03.519259Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715685:0 ProgressState 2024-11-21T23:18:03.519341Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715685:0 progress is 1/1 2024-11-21T23:18:03.519386Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715685:0 2024-11-21T23:18:03.523739Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/SQS/.STD/MessageData, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.524357Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 1 -> 2 2024-11-21T23:18:03.525130Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:18:03.525157Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.525976Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/SQS/.STD/MessageData 2024-11-21T23:18:03.526159Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:03.526436Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:03.526515Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 waiting... 2024-11-21T23:18:03.528547Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T23:18:03.528623Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T23:18:03.528648Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 15 2024-11-21T23:18:03.528892Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T23:18:03.528915Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T23:18:03.528928Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 2 2024-11-21T23:18:03.529953Z node 6 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715686, at schemeshard: 72057594046644480 2024-11-21T23:18:03.532217Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:18:03.532654Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 2 -> 3 2024-11-21T23:18:03.533465Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:18:03.589717Z node 6 :FLAT_TX_SCHEMESHARD 
INFO: TCreateTable TConfigureParts operationId#281474976715686:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:18:03.589745Z node 6 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:18:03.589818Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 3 -> 128 2024-11-21T23:18:03.590308Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:18:03.591704Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231083639, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:18:03.591752Z node 6 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732231083639 2024-11-21T23:18:03.591853Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 128 -> 129 2024-11-21T23:18:03.592467Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:03.592855Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:03.592926Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:18:03.593406Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T23:18:03.593437Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T23:18:03.593453Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 16 2024-11-21T23:18:03.593619Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T23:18:03.593639Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T23:18:03.593653Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 4 2024-11-21T23:18:03.598239Z node 6 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715686 Step: 1732231083639 OrderId: 281474976715686 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 1522 } } 2024-11-21T23:18:03.599516Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:18:03.599557Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.599584Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2024-11-21T23:18:03.600268Z node 6 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046644480] TDone opId# 281474976715686:0 ProgressState 2024-11-21T23:18:03.600370Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2024-11-21T23:18:03.600421Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2024-11-21T23:18:04.699665Z node 6 :HTTP INFO: Listening on http://[::]:15858 2024-11-21T23:18:04.719460Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439875856864196669:2414], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path }
2024-11-21T23:18:04.729827Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439875856864196692:2419], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path }
2024-11-21T23:18:04.729932Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875856864196688:2416], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:18:04.730051Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:18:04.730066Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439875856864196689:2417], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T23:18:04.900911Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439875856864196690:2418]: Pool not found 2024-11-21T23:18:05.414856Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439875856864196686:2415]: Pool not found 2024-11-21T23:18:05.417869Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875861159164153:2440], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:18:05.417946Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:18:05.417970Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7439875861159164154:2441], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T23:18:05.675991Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [6:7439875861159164151:2439]: Pool not found 2024-11-21T23:18:05.703111Z node 6 :HTTP DEBUG: (#44,[::1]:44858) incoming connection opened 2024-11-21T23:18:05.703194Z node 6 :HTTP DEBUG: (#44,[::1]:44858) -> (POST /Root) 2024-11-21T23:18:05.703341Z node 6 :HTTP_PROXY INFO: proxy service: incoming request from [9839:2a00:6050:0:8039:2a00:6050:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 1989fe3d-1293d60e-4b5d8fee-63da585c 2024-11-21T23:18:05.703593Z node 6 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [1989fe3d-1293d60e-4b5d8fee-63da585c] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2024-11-21T23:18:05.703746Z node 6 :HTTP DEBUG: (#44,[::1]:44858) <- (400 InvalidAction) 2024-11-21T23:18:05.703800Z node 6 :HTTP DEBUG: (#44,[::1]:44858) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2024-11-21T23:18:05.703828Z node 6 :HTTP DEBUG: (#44,[::1]:44858) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 1989fe3d-1293d60e-4b5d8fee-63da585c x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2024-11-21T23:18:05.703897Z node 6 :HTTP DEBUG: (#44,[::1]:44858) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 2974, MsgBus: 62487 2024-11-21T23:17:46.847979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875778259286328:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:46.848057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002917/r3tmp/tmp4RDf9K/pdisk_1.dat 2024-11-21T23:17:47.257934Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2974, node 1 2024-11-21T23:17:47.292957Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:47.293099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:47.298231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:47.351051Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:47.351073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:47.351082Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:47.351171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:62487 TClient is connected to server localhost:62487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:47.885439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:47.912453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:17:48.060671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:48.355829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:48.480725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:50.519306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875795439157223:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:50.525869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:50.567694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:50.604096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:50.641802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:50.682413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:50.714727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:50.755533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:50.804154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875795439157717:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:50.804307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:50.804590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875795439157722:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:50.809496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:50.820220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875795439157724:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:51.849076Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875778259286328:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:51.849467Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26888, MsgBus: 28770 2024-11-21T23:17:52.928180Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875804660049044:2120];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:52.935660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002917/r3tmp/tmpxNMRQR/pdisk_1.dat 2024-11-21T23:17:53.035581Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:53.059350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:53.059457Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:53.061990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26888, node 2 2024-11-21T23:17:53.108851Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:53.108878Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:53.108886Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:53.108980Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28770 TClient is connected to server localhost:28770 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:53.570878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:53.588808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:53.665077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:17:53.834017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:53.926126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:56.279891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875821839919854:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:56.279980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:56.332492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:56.383076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:56.413363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:56.483789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:56.522051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:56.595456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:56.733412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875821839920356:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:56.733552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2024-11-21T23:17:56.734024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875821839920361:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:56.738180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:56.752146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875821839920363:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:57.943073Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875804660049044:2120];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:57.943661Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8794, MsgBus: 63282 2024-11-21T23:17:59.529014Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875835845199286:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002917/r3tmp/tmpnfN45N/pdisk_1.dat 2024-11-21T23:17:59.616288Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:17:59.684501Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8794, node 3 2024-11-21T23:17:59.825590Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:59.825696Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:59.829227Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:59.830018Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:59.830041Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:59.830049Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:59.830152Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63282 TClient is connected to server localhost:63282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:00.609890Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.651670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:00.766668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.970060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:01.062055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:03.563020Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875853025070006:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.563142Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.628938Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.734752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.810618Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.896848Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.942942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.005093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.093265Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875857320037809:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.093346Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.093622Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875857320037814:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.097335Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:04.110260Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875857320037816:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:04.515666Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875835845199286:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:04.515743Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpParams::Decimal-QueryService [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |86.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> KqpLimits::WaitCAsStateOnAbort >> KqpLimits::KqpMkqlMemoryLimitException >> KqpQuery::RewriteIfPresentToMap >> Viewer::SimpleFeatureFlags [GOOD] |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::ReadsetCountLimit |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18461, MsgBus: 3768 2024-11-21T23:17:47.390734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875785258113015:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:47.390841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002910/r3tmp/tmpHB33a4/pdisk_1.dat 2024-11-21T23:17:47.821480Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18461, node 1 2024-11-21T23:17:47.833448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:47.833544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:47.835203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:47.888818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:47.888841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2024-11-21T23:17:47.888848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:47.888956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3768 TClient is connected to server localhost:3768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:48.598686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:48.630775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:48.787545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:17:48.943987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:49.028756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:51.163412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875798143016616:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:51.171946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:51.211391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:51.308543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:51.361849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:51.399826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:51.446879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:51.521008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:51.608875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875802437984419:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:51.608960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:51.609478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875802437984424:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:51.614217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:51.624430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875802437984426:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:52.393735Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875785258113015:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:52.393833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20093, MsgBus: 11219 2024-11-21T23:17:53.843195Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875810810238979:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:53.843260Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002910/r3tmp/tmpmZDEz5/pdisk_1.dat 2024-11-21T23:17:53.976236Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:54.001557Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:54.001645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:54.008077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20093, node 2 2024-11-21T23:17:54.054884Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:54.054908Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:54.054915Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:54.055020Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11219 TClient is connected to server localhost:11219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:54.579141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
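The repeated KQP_WORKLOAD_SERVICE warnings in this block ("Resource pool default not found or you don't have access permissions" followed by "Transaction ... completed, doublechecking") show the workload service lazily creating the "default" resource pool on first use and retrying until the ESchemeOpCreateResourcePool transaction becomes visible; the retries succeed and the suite is still reported [GOOD], so these are startup noise rather than failures. For orientation, a user-defined pool is declared with a statement of roughly the following shape; the pool name and option names below are illustrative assumptions and may differ from the options supported by this YDB build, so treat them as placeholders rather than a confirmed API:

    -- Sketch only: hypothetical pool name and assumed option names,
    -- not taken from this test run.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );

The "default" pool the tests wait for is created automatically by the workload service itself, which is what the TPoolCreatorActor lines in this log correspond to.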
2024-11-21T23:17:54.589751Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:17:54.596967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:54.653960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:54.862972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:54.950120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:57.130045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875827990109879:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... 644480 2024-11-21T23:17:57.464938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875827990110385:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:17:58.848956Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875810810238979:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:58.849280Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27870, MsgBus: 31112 2024-11-21T23:17:59.712999Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875833866345544:2179];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002910/r3tmp/tmpqedZCH/pdisk_1.dat 2024-11-21T23:17:59.792794Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:17:59.841921Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:59.853810Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:59.853906Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:59.862811Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27870, node 3 2024-11-21T23:17:59.928097Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:59.928118Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:59.928126Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:59.928226Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31112 TClient is connected to server localhost:31112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:18:00.462944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:18:00.485864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
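The KQP_COMPILE_ACTOR errors reported further down in this block ("Invalid decimal precision: 99", "Parameter $value22 type mismatch, expected ... Precision: 22 Scale: 9 ..., actual ... Decimal(35,10)", and "Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),...>") are the negative cases of KqpParams::Decimal-QueryService: the test deliberately supplies decimal parameters whose precision and scale do not match the target columns. A minimal YQL sketch of the shapes involved, reconstructed from the error text (the table and parameter names are assumptions for illustration, not the test's actual identifiers):

    -- Column types implied by the conversion error:
    -- Key Int32, Value22 Decimal(22,9), Value35 Decimal(35,10)
    CREATE TABLE TestDecimal (
        Key Int32,
        Value22 Decimal(22,9),
        Value35 Decimal(35,10),
        PRIMARY KEY (Key)
    );

    -- Parameters must match the column types exactly: Decimal(35,10) is not
    -- implicitly narrowed to Decimal(22,9), and a declared precision of 99 is
    -- rejected at compile time.
    DECLARE $value22 AS Decimal(22, 9);
    DECLARE $value35 AS Decimal(35, 10);
    UPSERT INTO TestDecimal (Key, Value22, Value35) VALUES (1, $value22, $value35);

Passing a Decimal(35,10) value for $value22, or declaring a parameter with precision 99, produces the GENERIC_ERROR compile issues shown below in this test's output.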
2024-11-21T23:18:00.553509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.762063Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:18:00.854469Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.775738Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875851046216285:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.775877Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.848146Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.900471Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.946318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.034128Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.071787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.168450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.233501Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875855341184087:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.233586Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.234189Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875855341184092:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.239210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:04.256501Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T23:18:04.259728Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875855341184095:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:04.700132Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875833866345544:2179];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:04.700212Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:05.659133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:06.548953Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439875863931119289:2494], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2024-11-21T23:18:06.549272Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTM2OTUwN2YtMmU1ZTQyMzktMmRkOTNjMjUtYmI5ZjFkMjE=, ActorId: [3:7439875859636151707:2460], ActorState: ExecuteState, TraceId: 01jd8g9gdzfqh121p8t79t02w1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T23:18:06.651318Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTM2OTUwN2YtMmU1ZTQyMzktMmRkOTNjMjUtYmI5ZjFkMjE=, ActorId: [3:7439875859636151707:2460], ActorState: ExecuteState, TraceId: 01jd8g9gf1eyzsc9g6413p9n3j, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2024-11-21T23:18:06.717486Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439875863931119306:2500], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T23:18:06.718366Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTM2OTUwN2YtMmU1ZTQyMzktMmRkOTNjMjUtYmI5ZjFkMjE=, ActorId: [3:7439875859636151707:2460], ActorState: ExecuteState, TraceId: 01jd8g9gjk9pr8n257yd3cbb26, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T23:18:06.750256Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439875863931119318:2505], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T23:18:06.750558Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTM2OTUwN2YtMmU1ZTQyMzktMmRkOTNjMjUtYmI5ZjFkMjE=, ActorId: [3:7439875859636151707:2460], ActorState: ExecuteState, TraceId: 01jd8g9gmfetsvz8ysy0xz4qv5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin+ColumnStore >> KqpStats::StatsProfile >> TColumnShardTestSchema::EnableColdTiersAfterTtl >> TColumnShardTestSchema::ExternalTTL_Types ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: 2024-11-21T23:16:27.743036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:27.743389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:16:27.743432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 3507, node 1 TClient is connected to server localhost:18263 2024-11-21T23:16:36.197751Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:36.198011Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:36.198111Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 21249, node 2 TClient is connected to server localhost:16839 2024-11-21T23:16:45.752010Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:45.752197Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:45.752276Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 65102, node 3 TClient is connected to server localhost:8616 2024-11-21T23:16:58.289719Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:89:2135], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:58.290220Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:16:58.290579Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 11865, node 4 TClient is connected to server localhost:31215 2024-11-21T23:17:12.461088Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:12.461295Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:12.461429Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 22190, node 5 TClient is connected to server localhost:64276 2024-11-21T23:17:24.745839Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:24.746061Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:24.746166Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 26222, node 6 TClient is connected to server localhost:27882 2024-11-21T23:17:37.892005Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:37.892578Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:37.892704Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 26882, node 7 TClient is connected to server localhost:26368 2024-11-21T23:17:52.859674Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:452:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:52.860059Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:52.860215Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:17:53.333706Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:53.496636Z node 8 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T23:17:53.535669Z node 8 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T23:17:54.307262Z node 8 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 20189, node 8 TClient is connected to server localhost:16743 2024-11-21T23:17:55.069603Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:55.069720Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:55.069816Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:55.080225Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:17:59.375826Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7439875836303713509:2127];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:59.375953Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:17:59.741237Z node 11 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:59.797169Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:59.797414Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:59.800016Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7755, node 11 2024-11-21T23:17:59.986121Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:59.986168Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:59.986194Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:59.986454Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61522 2024-11-21T23:18:04.376883Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7439875836303713509:2127];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:04.376982Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpStats::JoinNoStatsScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:13:33.763018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:13:33.763886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.763957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:13:33.764017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:13:33.764066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:13:33.764106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:13:33.764168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:13:33.767471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:13:33.888310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:13:33.888364Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:13:33.927534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:13:33.930760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:13:33.930875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:13:33.962264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:13:33.965386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:13:33.966570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:33.968744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:13:33.977812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:33.985572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:33.996507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:13:33.996616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2024-11-21T23:13:33.996686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:13:33.996840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.013910Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:13:34.171199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:13:34.171388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.171578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:13:34.171803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:13:34.171862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.173693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.173811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:13:34.173928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.173976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:13:34.174001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:13:34.174028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:13:34.175752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.175811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:13:34.175844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:13:34.177487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.177548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.177591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.177644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.181321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:13:34.183096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:13:34.183280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:13:34.184322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:13:34.184431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:13:34.184478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.184842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:13:34.184921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:13:34.185104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.185178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:13:34.187112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:13:34.187156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:13:34.187327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:13:34.187389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:13:34.187767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:13:34.187816Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:13:34.187909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:13:34.187965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.188025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:13:34.188063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:13:34.188095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T23:13:34.188133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:13:34.188195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:13:34.188232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:13:34.188265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:13:34.190077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.190188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:13:34.190229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:13:34.190264Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:13:34.190318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:13:34.190504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... hType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:18:08.889759Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:18:08.890141Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir/Table/UserDefinedIndex" took 439us result status StatusSuccess 2024-11-21T23:18:08.897234Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:18:08.899058Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:18:08.899626Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" took 574us result status StatusSuccess 2024-11-21T23:18:08.904569Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> TestKinesisHttpProxy::ListShardsToken [GOOD] |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |86.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/ydb-public-sdk-cpp-client-ydb_federated_topic-ut >> 
TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> KqpExplain::SqlIn >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpFlipJoin::RightOnly_2 [GOOD] >> KqpFlipJoin::RightOnly_3 >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |86.5%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> KqpExplain::PrecomputeRange >> TestYmqHttpProxy::TestDeleteQueue [GOOD] |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |86.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] Test command err: 2024-11-21T23:17:18.769154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875658470305869:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:18.769214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00250a/r3tmp/tmpQqdUVc/pdisk_1.dat 2024-11-21T23:17:19.163423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:19.163567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:19.165168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:19.217630Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62065, node 1 2024-11-21T23:17:19.339988Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:19.340014Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:19.340021Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:19.340132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5319 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:19.652840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.663545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.663632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.664429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:19.664649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:19.664668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:19.665299Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:19.665320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:19.665601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.667063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231039714, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:19.667105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:19.667366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:19.668025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.668187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.668238Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:19.668322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:19.668360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:19.668393Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:17:19.671184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:19.671257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:19.671303Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:19.671395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 2024-11-21T23:17:19.675424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:5319 2024-11-21T23:17:19.915712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.915982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.916015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.918037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:19.918206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:19.920228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231039966, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:19.920329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231039966, at schemeshard: 72057594046644480 2024-11-21T23:17:19.920520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:19.920606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:19.920629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:17:19.920987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.921176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.921538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:19.921557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:19.921573Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:19.921607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2024-11-21T23:17:19.922837Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:19.924254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.924468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.924492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.924550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:19.924628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:19.924646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:19.925065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:19.925173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.925463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.926017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 
72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:19.926079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:19.926093Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:19.926165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T23:17:19.927084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.927315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.927897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECT ... 5:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.184753Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231093229, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:18:13.184784Z node 7 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId#281474976710695:0 HandleReply TEvOperationPlan, step: 1732231093229, at tablet: 72057594046644480 2024-11-21T23:18:13.184967Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710695:0 128 -> 240 2024-11-21T23:18:13.185536Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:13.185795Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:13.185844Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710695:0 ProgressState 2024-11-21T23:18:13.185925Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710695:0 progress is 1/1 2024-11-21T23:18:13.185974Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710695:0 2024-11-21T23:18:13.186181Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710695, publications: 1, subscribers: 1 2024-11-21T23:18:13.188825Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710695 2024-11-21T23:18:13.188872Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710695 2024-11-21T23:18:13.188893Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710695, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 5 2024-11-21T23:18:13.188972Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710695, subscribers: 1 Http output full {"Consumer":{"ConsumerCreationTimestamp":1732231.093,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}} 2024-11-21T23:18:13.189803Z node 7 :HTTP_PROXY INFO: http request [RegisterStreamConsumer] requestId [fc74ff7d-406bc51c-f82eb4ff-56375b31] reply ok 
2024-11-21T23:18:13.190071Z node 7 :HTTP DEBUG: (#44,[::1]:57558) <- (200 ) 2024-11-21T23:18:13.190193Z node 7 :HTTP DEBUG: (#44,[::1]:57558) connection closed 200 {"Consumer":{"ConsumerCreationTimestamp":1732231.093,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}} 2024-11-21T23:18:13.191333Z node 7 :HTTP DEBUG: (#47,[::1]:57572) incoming connection opened 2024-11-21T23:18:13.191422Z node 7 :HTTP DEBUG: (#47,[::1]:57572) -> (POST /Root) 2024-11-21T23:18:13.191560Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [786c:4d00:6050:0:606c:4d00:6050:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: 5e685fea-758477f-41eaea44-fecec6fe E0000 00:00:1732231093.191962 336565 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732231093.192452 336565 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732231093.192505 336565 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732231093.192648 336565 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1732231093.192689 336565 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T23:18:13.192052Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [5e685fea-758477f-41eaea44-fecec6fe] got new request from [786c:4d00:6050:0:606c:4d00:6050:0] database '/Root' stream 'teststream' 2024-11-21T23:18:13.192421Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [5e685fea-758477f-41eaea44-fecec6fe] [auth] Authorized successfully 2024-11-21T23:18:13.192600Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [5e685fea-758477f-41eaea44-fecec6fe] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:13.195274Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [5e685fea-758477f-41eaea44-fecec6fe] reply ok 2024-11-21T23:18:13.195625Z node 7 :HTTP DEBUG: (#47,[::1]:57572) <- (200 ) 2024-11-21T23:18:13.195741Z node 7 :HTTP DEBUG: (#47,[::1]:57572) connection closed Http output full {"NextToken":"CMm3k4i1MhABGAEiCnRlc3RzdHJlYW0=","Consumers":[{"ConsumerCreationTimestamp":1732231093,"ConsumerArn":"","ConsumerName":"user1","ConsumerStatus":"ACTIVE"}]} 200 {"NextToken":"CMm3k4i1MhABGAEiCnRlc3RzdHJlYW0=","Consumers":[{"ConsumerCreationTimestamp":1732231093,"ConsumerArn":"","ConsumerName":"user1","ConsumerStatus":"ACTIVE"}]} 2024-11-21T23:18:13.198775Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439875889672768329:2442]: Pool not found 2024-11-21T23:18:13.198799Z node 7 :HTTP DEBUG: (#44,[::1]:57586) incoming connection opened 2024-11-21T23:18:13.198880Z node 7 :HTTP DEBUG: (#44,[::1]:57586) -> (POST /Root) 2024-11-21T23:18:13.199276Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [18cd:f01:6050:0:cd:f01:6050:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: a6e9de1-b0902215-6c811aca-f0be5bf8 
2024-11-21T23:18:13.200016Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [a6e9de1-b0902215-6c811aca-f0be5bf8] got new request from [18cd:f01:6050:0:cd:f01:6050:0] database '/Root' stream '' 2024-11-21T23:18:13.200719Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [a6e9de1-b0902215-6c811aca-f0be5bf8] [auth] Authorized successfully 2024-11-21T23:18:13.201016Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [a6e9de1-b0902215-6c811aca-f0be5bf8] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:13.201829Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [a6e9de1-b0902215-6c811aca-f0be5bf8] reply with status: BAD_REQUEST message:
: Error: Provided NextToken is malformed, code: 500040 2024-11-21T23:18:13.202257Z node 7 :HTTP DEBUG: (#44,[::1]:57586) <- (400 InvalidArgumentException) 2024-11-21T23:18:13.202321Z node 7 :HTTP DEBUG: (#44,[::1]:57586) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.ListStreamConsumers X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 61 { "NextToken":"CMm3k4i1MhABGAEiCnRlc3RzdHJlYW0=garbage", "StreamArn":"", "MaxResults":100 } 0 2024-11-21T23:18:13.202364Z node 7 :HTTP DEBUG: (#44,[::1]:57586) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: a6e9de1-b0902215-6c811aca-f0be5bf8 x-amz-crc32: 40394744 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T23:18:13.202498Z node 7 :HTTP DEBUG: (#44,[::1]:57586) connection closed Http output full {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 400 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T23:18:13.206093Z node 7 :HTTP DEBUG: (#47,[::1]:57600) incoming connection opened 2024-11-21T23:18:13.206161Z node 7 :HTTP DEBUG: (#47,[::1]:57600) -> (POST /Root) 2024-11-21T23:18:13.206307Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [98a6:701:6050:0:80a6:701:6050:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: 558725a-89e102f5-7947ae6f-3eab614b 2024-11-21T23:18:13.206850Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [558725a-89e102f5-7947ae6f-3eab614b] got new request from [98a6:701:6050:0:80a6:701:6050:0] database '/Root' stream '' 2024-11-21T23:18:13.207407Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [558725a-89e102f5-7947ae6f-3eab614b] [auth] Authorized successfully 2024-11-21T23:18:13.207710Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [558725a-89e102f5-7947ae6f-3eab614b] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:13.208331Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [558725a-89e102f5-7947ae6f-3eab614b] reply with status: BAD_REQUEST message:
: Error: Provided NextToken is malformed, code: 500040 2024-11-21T23:18:13.208474Z node 7 :HTTP DEBUG: (#47,[::1]:57600) <- (400 InvalidArgumentException) 2024-11-21T23:18:13.208528Z node 7 :HTTP DEBUG: (#47,[::1]:57600) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.ListStreamConsumers X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "NextToken":"Y18AjrkckeND9kjnbEhjhkX$^[]?!", "StreamArn":"", "MaxResults":100 } 0 2024-11-21T23:18:13.208572Z node 7 :HTTP DEBUG: (#47,[::1]:57600) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 558725a-89e102f5-7947ae6f-3eab614b x-amz-crc32: 40394744 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T23:18:13.208702Z node 7 :HTTP DEBUG: (#47,[::1]:57600) connection closed Http output full {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 400 {"__type":"InvalidArgumentException","message":"
: Error: Provided NextToken is malformed, code: 500040\n"} 2024-11-21T23:18:13.209686Z node 7 :HTTP DEBUG: (#44,[::1]:57602) incoming connection opened 2024-11-21T23:18:13.209752Z node 7 :HTTP DEBUG: (#44,[::1]:57602) -> (POST /Root) 2024-11-21T23:18:13.209868Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b825:c300:6050:0:a025:c300:6050:0] request [ListStreamConsumers] url [/Root] database [/Root] requestId: f16ab403-462f6e54-d533ad51-3ec8278b 2024-11-21T23:18:13.210250Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [f16ab403-462f6e54-d533ad51-3ec8278b] got new request from [b825:c300:6050:0:a025:c300:6050:0] database '/Root' stream 'teststream' 2024-11-21T23:18:13.210842Z node 7 :HTTP_PROXY DEBUG: http request [ListStreamConsumers] requestId [f16ab403-462f6e54-d533ad51-3ec8278b] [auth] Authorized successfully 2024-11-21T23:18:13.210960Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [f16ab403-462f6e54-d533ad51-3ec8278b] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"NextToken":"","Consumers":[{"ConsumerCreationTimestamp":1732231093,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}]} 200 {"NextToken":"","Consumers":[{"ConsumerCreationTimestamp":1732231093,"ConsumerArn":"","ConsumerName":"user2","ConsumerStatus":"ACTIVE"}]} 2024-11-21T23:18:13.212304Z node 7 :HTTP_PROXY INFO: http request [ListStreamConsumers] requestId [f16ab403-462f6e54-d533ad51-3ec8278b] reply ok 2024-11-21T23:18:13.212556Z node 7 :HTTP DEBUG: (#44,[::1]:57602) <- (200 ) 2024-11-21T23:18:13.212629Z node 7 :HTTP DEBUG: (#44,[::1]:57602) connection closed >> TestYmqHttpProxy::TestDeleteMessageBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] Test command err: 2024-11-21T23:17:19.526033Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875663641106514:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:19.526749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002470/r3tmp/tmpm9GPcI/pdisk_1.dat 2024-11-21T23:17:20.034528Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:20.037215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:20.037320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:20.040961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23907, node 1 2024-11-21T23:17:20.211067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:20.211090Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:20.211096Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:20.211184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21780 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:20.580943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.589617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.589677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.590268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:20.590385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 waiting... 
2024-11-21T23:17:20.594242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:20.594826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:20.594855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:20.595180Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.595691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:20.596682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040645, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.596728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:20.596961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:20.597458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.597544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.597571Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:20.597621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:20.597640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:20.597664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:17:20.600096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:20.600142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:20.600154Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:20.600211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:21780 2024-11-21T23:17:20.815725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.815961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.816005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.816430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, 
path: //Root 2024-11-21T23:17:20.816500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.817874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040862, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.817897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231040862, at schemeshard: 72057594046644480 2024-11-21T23:17:20.818091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:20.818161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:20.818189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:17:20.818603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.818719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.820257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:20.820295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:20.820310Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:20.820361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 
2024-11-21T23:17:20.826249Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:20.827786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.828014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.828043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.828096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:20.828154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:20.828170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:20.828599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:20.828688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.828929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.829746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:20.829798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:20.829812Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:20.829862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T23:17:20.832285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.832411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.832838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE ... requestId [59198f3e-b2fdc4d2-40fc1a7a-e714d3c4] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:13.669143Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [59198f3e-b2fdc4d2-40fc1a7a-e714d3c4] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:13.670520Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4, operationId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.670621Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715705:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/Root/SQS/cloud4', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 28], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2024-11-21T23:18:13.671181Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715705, database: /Root, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/Root/SQS/cloud4', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 28], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /Root/SQS/cloud4 2024-11-21T23:18:13.751947Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0, operationId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.752253Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715710:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:18:13.753309Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715710, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0 2024-11-21T23:18:13.753508Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:13.753764Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:13.753823Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715710:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:18:13.755778Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715710 2024-11-21T23:18:13.755838Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715710 2024-11-21T23:18:13.755866Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715710, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 7 2024-11-21T23:18:13.756134Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715710 2024-11-21T23:18:13.756161Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715710 2024-11-21T23:18:13.756176Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715710, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 3 2024-11-21T23:18:13.758087Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231093803, transactions count in step: 1, at schemeshard: 72057594046644480 
2024-11-21T23:18:13.758146Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715710:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732231093803, at schemeshard: 72057594046644480 2024-11-21T23:18:13.758296Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715710:0 128 -> 240 2024-11-21T23:18:13.759063Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:13.759408Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:13.759471Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715710:0 ProgressState 2024-11-21T23:18:13.759598Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715710:0 progress is 1/1 2024-11-21T23:18:13.759646Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715710:0 2024-11-21T23:18:13.759691Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715710, publications: 2, subscribers: 0 2024-11-21T23:18:13.761468Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976715710 2024-11-21T23:18:13.761509Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715710 2024-11-21T23:18:13.761533Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715710, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 8 2024-11-21T23:18:13.761746Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715710 2024-11-21T23:18:13.761769Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715710 2024-11-21T23:18:13.761782Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715710, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 4 2024-11-21T23:18:13.761832Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715710, subscribers: 0 2024-11-21T23:18:13.775719Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715710, at schemeshard: 72057594046644480 2024-11-21T23:18:13.782071Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0/v4, operationId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.782347Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715711:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:18:13.783260Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715711, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0/v4 2024-11-21T23:18:13.783455Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:13.783706Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:13.783767Z node 7 :FLAT_TX_SCHEMESHARD 
INFO: MkDir::TPropose operationId#281474976715711:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:18:13.785327Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T23:18:13.785368Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T23:18:13.785391Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 5 2024-11-21T23:18:13.785609Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T23:18:13.785634Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T23:18:13.785651Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 3 2024-11-21T23:18:13.787226Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715711, at schemeshard: 72057594046644480 2024-11-21T23:18:13.788222Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231093831, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:18:13.788259Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976715711:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732231093831, at schemeshard: 72057594046644480 2024-11-21T23:18:13.788408Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715711:0 128 -> 240 2024-11-21T23:18:13.789058Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:13.789272Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:13.789332Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715711:0 ProgressState 2024-11-21T23:18:13.789424Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715711:0 progress is 1/1 2024-11-21T23:18:13.789465Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715711:0 2024-11-21T23:18:13.789507Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715711, publications: 2, subscribers: 1 2024-11-21T23:18:13.791471Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715711 2024-11-21T23:18:13.791546Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T23:18:13.791570Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 6 2024-11-21T23:18:13.791791Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 4 PathOwnerId: 72057594046644480, cookie: 
281474976715711 2024-11-21T23:18:13.791813Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715711 2024-11-21T23:18:13.791827Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715711, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 4 2024-11-21T23:18:13.791871Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715711, subscribers: 1 2024-11-21T23:18:13.906928Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439875897418199030:2465]: Pool not found 2024-11-21T23:18:13.908331Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [59198f3e-b2fdc4d2-40fc1a7a-e714d3c4] Got succesfult GRPC response. 2024-11-21T23:18:13.908380Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [59198f3e-b2fdc4d2-40fc1a7a-e714d3c4] reply ok 2024-11-21T23:18:13.908512Z node 7 :HTTP DEBUG: (#44,[::1]:55120) <- (200 ) 2024-11-21T23:18:13.908635Z node 7 :HTTP DEBUG: (#44,[::1]:55120) connection closed Http output full {"QueueUrl":"http://ghrun-uc25mmfz34.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo"} >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2024-11-21T23:17:18.723015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875658990341170:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:18.723149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024f4/r3tmp/tmpGFfNn8/pdisk_1.dat 2024-11-21T23:17:19.140349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:19.140510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:19.149215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:19.183277Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14871, node 1 2024-11-21T23:17:19.263194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:19.263218Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:19.263240Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:19.263325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12786 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:19.659983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.666816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.666905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.667659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:19.667864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:19.667881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:19.668640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:19.668678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:19.669065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.671311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231039714, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:19.671356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:19.671618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:19.672495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.672678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.672741Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:19.672828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:19.672868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:19.672905Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 waiting... 2024-11-21T23:17:19.677791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:19.677870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:19.677888Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:19.678064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T23:17:19.678435Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:12786 2024-11-21T23:17:19.971185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.971429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.971455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.972106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:19.972281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.973705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040022, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:19.973754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231040022, at schemeshard: 72057594046644480 2024-11-21T23:17:19.973984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:19.974062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:19.974093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 waiting... 
2024-11-21T23:17:19.975573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.975783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.977048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:19.977084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:19.977102Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:19.977184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2024-11-21T23:17:19.979065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:19.982070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.982305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.982332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.982432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:19.982525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:19.982539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:19.982957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:19.983089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.983313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.984208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:19.984243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:19.984256Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:19.984318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T23:17:19.987221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at 
schemeshard: 72057594046644480 2024-11-21T23:17:19.987408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.988281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRE ... emeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715691 2024-11-21T23:18:13.492061Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715691 2024-11-21T23:18:13.492076Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715691, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 3 2024-11-21T23:18:13.492119Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715691, subscribers: 1 Http output full {} 200 {} 2024-11-21T23:18:13.494664Z node 7 :HTTP_PROXY INFO: http request [CreateStream] requestId [1a6af21d-89be1b43-22fe651d-dac79489] reply ok 2024-11-21T23:18:13.495022Z node 7 :HTTP DEBUG: (#44,[::1]:59184) <- (200 ) 2024-11-21T23:18:13.495124Z node 7 :HTTP DEBUG: (#44,[::1]:59184) connection closed 2024-11-21T23:18:13.498706Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Registered with mediator time cast 2024-11-21T23:18:13.498742Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Registered with mediator time cast 2024-11-21T23:18:13.498757Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2024-11-21T23:18:13.498778Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Registered with mediator time cast 2024-11-21T23:18:13.498795Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Registered with mediator time cast 2024-11-21T23:18:13.506643Z node 7 :HTTP DEBUG: (#44,[::1]:59190) incoming connection opened 2024-11-21T23:18:13.506727Z node 7 :HTTP DEBUG: (#44,[::1]:59190) -> (POST /Root) 2024-11-21T23:18:13.506895Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [7878:a100:6050:0:6078:a100:6050:0] request [ListShards] url [/Root] database [/Root] requestId: be2cb8a6-4794f187-aa071551-272b32d8 2024-11-21T23:18:13.507368Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [be2cb8a6-4794f187-aa071551-272b32d8] got new request from [7878:a100:6050:0:6078:a100:6050:0] database '/Root' stream 'teststream' E0000 00:00:1732231093.510021 336664 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T23:18:13.509796Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [be2cb8a6-4794f187-aa071551-272b32d8] [auth] Authorized successfully 2024-11-21T23:18:13.509933Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [be2cb8a6-4794f187-aa071551-272b32d8] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:13.517008Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:13.517053Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439875896802365491:2482], now have 1 active actors on pipe 2024-11-21T23:18:13.517083Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:13.517104Z 
node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439875896802365492:2483], now have 1 active actors on pipe 2024-11-21T23:18:13.518252Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [be2cb8a6-4794f187-aa071551-272b32d8] reply ok 2024-11-21T23:18:13.518534Z node 7 :HTTP DEBUG: (#44,[::1]:59190) <- (200 ) 2024-11-21T23:18:13.518640Z node 7 :HTTP DEBUG: (#44,[::1]:59190) connection closed Http output full {"NextToken":"CI26k4i1MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CI26k4i1MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T23:18:13.518889Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:13.518919Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439875896802365491:2482] destroyed 2024-11-21T23:18:13.518940Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:13.518960Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439875896802365492:2483] destroyed 2024-11-21T23:18:13.522637Z node 7 :HTTP DEBUG: (#44,[::1]:59200) incoming connection opened 2024-11-21T23:18:13.522712Z node 7 :HTTP DEBUG: (#44,[::1]:59200) -> (POST /Root) 2024-11-21T23:18:13.522885Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d8e1:b600:6050:0:c0e1:b600:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 52ac044-cbd1e70f-9006d7f6-62a9b33 2024-11-21T23:18:13.523371Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [52ac044-cbd1e70f-9006d7f6-62a9b33] got new request from [d8e1:b600:6050:0:c0e1:b600:6050:0] database '/Root' stream 'teststream' E0000 00:00:1732231093.524100 336664 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T23:18:13.523916Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [52ac044-cbd1e70f-9006d7f6-62a9b33] [auth] Authorized successfully 2024-11-21T23:18:13.524033Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [52ac044-cbd1e70f-9006d7f6-62a9b33] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"NextToken":"CJW6k4i1MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 
{"NextToken":"CJW6k4i1MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2024-11-21T23:18:13.525396Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:13.525437Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439875896802365503:2487], now have 1 active actors on pipe 2024-11-21T23:18:13.525468Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:13.525488Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439875896802365504:2488], now have 1 active actors on pipe 2024-11-21T23:18:13.526483Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [52ac044-cbd1e70f-9006d7f6-62a9b33] reply ok 2024-11-21T23:18:13.526870Z node 7 :HTTP DEBUG: (#44,[::1]:59200) <- (200 ) 2024-11-21T23:18:13.526983Z node 7 :HTTP DEBUG: (#44,[::1]:59200) connection closed 2024-11-21T23:18:13.527305Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:13.527334Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439875896802365503:2487] destroyed 2024-11-21T23:18:13.527357Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:13.527375Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439875896802365504:2488] destroyed 2024-11-21T23:18:13.531464Z node 7 :HTTP DEBUG: (#47,[::1]:59212) incoming connection opened 2024-11-21T23:18:13.531563Z node 7 :HTTP DEBUG: (#47,[::1]:59212) -> (POST /Root) 2024-11-21T23:18:13.531692Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [98d6:6800:6050:0:80d6:6800:6050:0] request [ListShards] url [/Root] database [/Root] requestId: f9384897-ae72d169-b5d00911-7276e0b5 2024-11-21T23:18:13.532221Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f9384897-ae72d169-b5d00911-7276e0b5] got new request from [98d6:6800:6050:0:80d6:6800:6050:0] database '/Root' stream 'teststream' E0000 00:00:1732231093.541833 336665 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2024-11-21T23:18:13.541579Z node 7 :HTTP_PROXY DEBUG: http request [ListShards] requestId [f9384897-ae72d169-b5d00911-7276e0b5] [auth] Authorized successfully 2024-11-21T23:18:13.541746Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f9384897-ae72d169-b5d00911-7276e0b5] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:13.543618Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:13.543667Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439875896802365515:2492], now have 1 active actors on pipe 2024-11-21T23:18:13.543718Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:13.543739Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439875896802365516:2493], now have 1 active 
actors on pipe 2024-11-21T23:18:13.545128Z node 7 :HTTP_PROXY INFO: http request [ListShards] requestId [f9384897-ae72d169-b5d00911-7276e0b5] reply ok 2024-11-21T23:18:13.545355Z node 7 :HTTP DEBUG: (#47,[::1]:59212) <- (200 ) 2024-11-21T23:18:13.545477Z node 7 :HTTP DEBUG: (#47,[::1]:59212) connection closed 2024-11-21T23:18:13.545697Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:13.545728Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439875896802365515:2492] destroyed 2024-11-21T23:18:13.545755Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:13.545783Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439875896802365516:2493] destroyed Http output full {"NextToken":"CKi6k4i1MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CKi6k4i1MhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} >> KqpJoin::RightTableIndexPredicate [GOOD] >> KqpJoin::RightTableValuePredicate >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2024-11-21T23:17:20.102685Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875668968892075:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:20.105946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002468/r3tmp/tmplSRiMR/pdisk_1.dat 2024-11-21T23:17:20.769727Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:20.772178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:20.772264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:20.775714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25505, node 1 2024-11-21T23:17:20.842648Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:20.842667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:20.842677Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:20.842775Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:19328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:21.140956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:21.147439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:21.157562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:21.158415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:21.158731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:21.158758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:21.159548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:21.159579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:21.160105Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:21.162120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231041205, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:21.162164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:21.166699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:21.167820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:21.167984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:21.168034Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:21.168167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:21.168204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:21.168253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:17:21.170241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:21.170317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:21.170336Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:21.170422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T23:17:21.170723Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:19328 waiting... 
2024-11-21T23:17:21.387918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:21.388339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:21.388386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:21.389038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:21.389215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:21.391739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:21.394698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231041436, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:21.394744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231041436, at schemeshard: 72057594046644480 2024-11-21T23:17:21.394999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:21.395325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:21.395370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 1 2024-11-21T23:17:21.395846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:21.396000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:21.397396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:21.397434Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:21.397450Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:21.397532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:17:21.399432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:21.399672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:21.399689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:21.399732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 
progress is 1/1 2024-11-21T23:17:21.399787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:21.399799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:21.400145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:21.400225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:21.400395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:21.401138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:21.401169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:21.401179Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:21.401219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T23:17:21.402647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:21.402844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:21.403414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE ... 
4-11-21T23:18:12.269898Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715685:0 129 -> 240 2024-11-21T23:18:12.270271Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715685:0 ProgressState 2024-11-21T23:18:12.270348Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715685:0 progress is 1/1 2024-11-21T23:18:12.270410Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715685:0 2024-11-21T23:18:12.274151Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/SQS/.STD/MessageData, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T23:18:12.274717Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 1 -> 2 2024-11-21T23:18:12.276198Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715686:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:18:12.276229Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T23:18:12.277059Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715686, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/SQS/.STD/MessageData 2024-11-21T23:18:12.277247Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:12.277485Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:12.277546Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 ProgressState, operation type: TxCreateTable, at tablet72057594046644480 2024-11-21T23:18:12.278666Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 15 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T23:18:12.278710Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T23:18:12.278728Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 15 2024-11-21T23:18:12.278930Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T23:18:12.278952Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T23:18:12.278973Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 2 waiting... 
2024-11-21T23:18:12.280818Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715686, at schemeshard: 72057594046644480 2024-11-21T23:18:12.281052Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715686:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2024-11-21T23:18:12.281119Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 2 -> 3 2024-11-21T23:18:12.281915Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 ProgressState at tabletId# 72057594046644480 2024-11-21T23:18:12.305995Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId#281474976715686:0 HandleReply TEvProposeTransactionResult at tabletId# 72057594046644480 2024-11-21T23:18:12.306022Z node 7 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046644480 2024-11-21T23:18:12.306100Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 3 -> 128 2024-11-21T23:18:12.306654Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply ProgressState at tablet: 72057594046644480 2024-11-21T23:18:12.308092Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231092354, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:18:12.308139Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId#281474976715686:0 HandleReply TEvOperationPlan at tablet: 72057594046644480, stepId: 1732231092354 2024-11-21T23:18:12.308248Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 128 -> 129 2024-11-21T23:18:12.309039Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:12.309473Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:12.309542Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 ProgressState at tablet: 72057594046644480 2024-11-21T23:18:12.310890Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 5 Version: 16 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T23:18:12.310933Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T23:18:12.310953Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 5], version: 16 2024-11-21T23:18:12.311159Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 27 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715686 2024-11-21T23:18:12.311186Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715686 2024-11-21T23:18:12.311197Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715686, pathId: [OwnerId: 72057594046644480, LocalPathId: 27], version: 4 2024-11-21T23:18:12.312396Z node 7 :FLAT_TX_SCHEMESHARD INFO: Unexpected message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037906 Status: COMPLETE TxId: 281474976715686 Step: 1732231092354 OrderId: 281474976715686 ExecLatency: 0 
ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037906 CpuTimeUsec: 1352 } } 2024-11-21T23:18:12.313682Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715686:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2024-11-21T23:18:12.313718Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T23:18:12.313747Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715686:0 129 -> 240 2024-11-21T23:18:12.314420Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2024-11-21T23:18:12.314505Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2024-11-21T23:18:12.314552Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2024-11-21T23:18:13.442992Z node 7 :HTTP INFO: Listening on http://[::]:16130 2024-11-21T23:18:13.484053Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439875894472793328:2410], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T23:18:13.513703Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439875894472793383:2420], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T23:18:13.513793Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439875894472793384:2421], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T23:18:13.554058Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439875894472793382:2419], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:13.554580Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:13.783071Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439875894472793380:2418]: Pool not found 2024-11-21T23:18:14.362716Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7439875894472793378:2417]: Pool not found 2024-11-21T23:18:14.366083Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439875898767760818:2440], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.366082Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7439875898767760819:2441], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2024-11-21T23:18:14.366209Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.442855Z node 7 :HTTP DEBUG: (#44,[::1]:59568) incoming connection opened 2024-11-21T23:18:14.442944Z node 7 :HTTP DEBUG: (#44,[::1]:59568) -> (POST /Root) 2024-11-21T23:18:14.443121Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [9872:3900:6050:0:8072:3900:6050:0] request [CreateStream] url [/Root] database [/Root] requestId: 3a9d6b12-c64a75a5-5d433890-e9566ecd 2024-11-21T23:18:14.443938Z node 7 :HTTP_PROXY INFO: http request [CreateStream] requestId [3a9d6b12-c64a75a5-5d433890-e9566ecd] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map 2024-11-21T23:18:14.444276Z node 7 :HTTP DEBUG: (#44,[::1]:59568) <- (400 MissingParameter) 2024-11-21T23:18:14.444327Z node 7 :HTTP DEBUG: (#44,[::1]:59568) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2024-11-21T23:18:14.444370Z node 7 :HTTP DEBUG: (#44,[::1]:59568) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 3a9d6b12-c64a75a5-5d433890-e9566ecd x-amz-crc32: 3671469627 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map"} 2024-11-21T23:18:14.444492Z node 7 :HTTP DEBUG: (#44,[::1]:59568) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:394: Top level of json value is not a map"} >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RandomUuid >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin >> KqpStats::SysViewClientLost >> KqpTypes::QuerySpecialTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2024-11-21T23:17:18.946278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875658650282233:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:18.947158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024bc/r3tmp/tmp4PGYSM/pdisk_1.dat 2024-11-21T23:17:19.396556Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:19.404220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:19.404324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19319, node 1 2024-11-21T23:17:19.405784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:19.406509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.406535Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.406601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2024-11-21T23:17:19.406659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.607751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:19.607778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:19.607784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:19.607872Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:19.989704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.996105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.996155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.996840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:19.997027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:19.997047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:19.997605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:19.997619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:19.997947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:19.999797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:20.001291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040050, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.001333Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:20.001581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:20.002176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.002330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.002378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:20.002476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:20.002504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:20.002538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2024-11-21T23:17:20.005209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:20.005243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:20.005274Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:20.005348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 TClient is connected to server localhost:17385 2024-11-21T23:17:20.255683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.255914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.255953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.256405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:20.256528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.257863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040302, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.257883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231040302, at schemeshard: 72057594046644480 2024-11-21T23:17:20.258132Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:20.258206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:20.258234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 waiting... 2024-11-21T23:17:20.260982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.261176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.261587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:20.262421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:20.262476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:20.262493Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:20.262553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:17:20.267178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.267402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.267419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.267503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:20.267591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:20.267605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:20.268008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:20.268118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.268415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.269310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:20.269354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:20.269377Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 
281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:20.269427Z node 1 :FLAT_TX_S ... 15.576777Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [57b05593-85702cb1-2c0b2020-6950e9a2] reply with status: STATUS_UNDEFINED message: The batch request doesn't contain any entries. 2024-11-21T23:18:15.576990Z node 7 :HTTP DEBUG: (#44,[::1]:54020) <- (400 AWS.SimpleQueueService.EmptyBatchRequest) 2024-11-21T23:18:15.577051Z node 7 :HTTP DEBUG: (#44,[::1]:54020) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ChangeMessageVisibilityBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 84 { "QueueUrl":"http://ghrun-uc25mmfz34.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": [ ] } 0 2024-11-21T23:18:15.577114Z node 7 :HTTP DEBUG: (#44,[::1]:54020) Response: HTTP/1.1 400 AWS.SimpleQueueService.EmptyBatchRequest Connection: close x-amzn-requestid: 57b05593-85702cb1-2c0b2020-6950e9a2 x-amz-crc32: 2331440613 Content-Type: application/x-amz-json-1.1 Content-Length: 112 {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2024-11-21T23:18:15.577192Z node 7 :HTTP DEBUG: (#44,[::1]:54020) connection closed Http output full {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2024-11-21T23:18:15.578331Z node 7 :HTTP DEBUG: (#44,[::1]:54026) incoming connection opened 2024-11-21T23:18:15.578434Z node 7 :HTTP DEBUG: (#44,[::1]:54026) -> (POST /Root) 2024-11-21T23:18:15.578545Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [f856:ee00:6050:0:e056:ee00:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: 6cc4ac0b-ca2be95a-d011536e-8162c8ef 2024-11-21T23:18:15.579227Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [6cc4ac0b-ca2be95a-d011536e-8162c8ef] got new request from [f856:ee00:6050:0:e056:ee00:6050:0] 2024-11-21T23:18:15.579736Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [6cc4ac0b-ca2be95a-d011536e-8162c8ef] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:15.579765Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [6cc4ac0b-ca2be95a-d011536e-8162c8ef] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"Failed":[{"Message":"The specified message isn't in flight.","Id":"Id-0","Code":"AWS.SimpleQueueService.MessageNotInflight","SenderFault":true}]} 2024-11-21T23:18:15.601703Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [6cc4ac0b-ca2be95a-d011536e-8162c8ef] Got succesfult GRPC response. 
2024-11-21T23:18:15.601929Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [6cc4ac0b-ca2be95a-d011536e-8162c8ef] reply ok 2024-11-21T23:18:15.602163Z node 7 :HTTP DEBUG: (#44,[::1]:54026) <- (200 ) 2024-11-21T23:18:15.602288Z node 7 :HTTP DEBUG: (#44,[::1]:54026) connection closed 2024-11-21T23:18:15.606795Z node 7 :HTTP DEBUG: (#44,[::1]:54034) incoming connection opened 2024-11-21T23:18:15.606887Z node 7 :HTTP DEBUG: (#44,[::1]:54034) -> (POST /Root) 2024-11-21T23:18:15.607217Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [7854:5300:6050:0:6054:5300:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: f1e38121-a942b7d7-8d77a024-593f8678 2024-11-21T23:18:15.608023Z node 7 :HTTP_PROXY WARN: http request [ChangeMessageVisibilityBatch] requestId [f1e38121-a942b7d7-8d77a024-593f8678] got new request with incorrect json from [7854:5300:6050:0:6054:5300:6050:0] 2024-11-21T23:18:15.608056Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [f1e38121-a942b7d7-8d77a024-593f8678] reply with status: BAD_REQUEST message: [json.exception.type_error.302] type must be string, but is number Http output full {"__type":"InvalidArgumentException","message":"[json.exception.type_error.302] type must be string, but is number"} 2024-11-21T23:18:15.608230Z node 7 :HTTP DEBUG: (#44,[::1]:54034) <- (400 InvalidArgumentException) 2024-11-21T23:18:15.608301Z node 7 :HTTP DEBUG: (#44,[::1]:54034) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ChangeMessageVisibilityBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked c3 { "QueueUrl":"http://ghrun-uc25mmfz34.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": [ { "Id":"Id-0", "ReceiptHandle":0 } ] } 0 2024-11-21T23:18:15.608347Z node 7 :HTTP DEBUG: (#44,[::1]:54034) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: f1e38121-a942b7d7-8d77a024-593f8678 x-amz-crc32: 3176743181 Content-Type: application/x-amz-json-1.1 Content-Length: 116 {"__type":"InvalidArgumentException","message":"[json.exception.type_error.302] type must be string, but is number"} 2024-11-21T23:18:15.608456Z node 7 :HTTP DEBUG: (#44,[::1]:54034) connection closed 2024-11-21T23:18:15.609314Z node 7 :HTTP DEBUG: (#47,[::1]:54044) incoming connection opened 2024-11-21T23:18:15.609360Z node 7 :HTTP DEBUG: (#47,[::1]:54044) -> (POST /Root) 2024-11-21T23:18:15.609465Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b80c:fd00:6050:0:a00c:fd00:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: cf753f53-81c0d400-11ccee6-5ca8e78a 2024-11-21T23:18:15.609755Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cf753f53-81c0d400-11ccee6-5ca8e78a] got new request from [b80c:fd00:6050:0:a00c:fd00:6050:0] 2024-11-21T23:18:15.610022Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [cf753f53-81c0d400-11ccee6-5ca8e78a] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:15.610036Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cf753f53-81c0d400-11ccee6-5ca8e78a] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:15.615861Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [cf753f53-81c0d400-11ccee6-5ca8e78a] Got succesfult GRPC response. 2024-11-21T23:18:15.615993Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cf753f53-81c0d400-11ccee6-5ca8e78a] reply ok 2024-11-21T23:18:15.616434Z node 7 :HTTP DEBUG: (#47,[::1]:54044) <- (200 ) 2024-11-21T23:18:15.616556Z node 7 :HTTP DEBUG: (#47,[::1]:54044) connection closed Http output full {"Failed":[{"Message":"VisibilityTimeout was not provided.","Id":"Id-0","Code":"MissingParameter","SenderFault":true}]} 2024-11-21T23:18:15.618894Z node 7 :HTTP DEBUG: (#44,[::1]:54060) incoming connection opened 2024-11-21T23:18:15.618977Z node 7 :HTTP DEBUG: (#44,[::1]:54060) -> (POST /Root) 2024-11-21T23:18:15.619101Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [58c7:a500:6050:0:40c7:a500:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: 3c73a238-bf790ee-4416570-39f97efe 2024-11-21T23:18:15.619601Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [3c73a238-bf790ee-4416570-39f97efe] got new request from [58c7:a500:6050:0:40c7:a500:6050:0] 2024-11-21T23:18:15.620001Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [3c73a238-bf790ee-4416570-39f97efe] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:15.620021Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [3c73a238-bf790ee-4416570-39f97efe] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:15.624929Z node 7 :SQS WARN: Request [3c73a238-bf790ee-4416570-39f97efe] Failed to process receipt handle : (yexception) ydb/core/ymq/base/helpers.cpp:147: Condition violated: `!decoded.empty()' 2024-11-21T23:18:15.626410Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [3c73a238-bf790ee-4416570-39f97efe] Got succesfult GRPC response. 
2024-11-21T23:18:15.626556Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [3c73a238-bf790ee-4416570-39f97efe] reply ok 2024-11-21T23:18:15.627106Z node 7 :HTTP DEBUG: (#44,[::1]:54060) <- (200 ) 2024-11-21T23:18:15.627221Z node 7 :HTTP DEBUG: (#44,[::1]:54060) connection closed Http output full {"Failed":[{"Message":"The specified receipt handle isn't valid.","Id":"Id-0","Code":"ReceiptHandleIsInvalid","SenderFault":true}]} 2024-11-21T23:18:15.629245Z node 7 :HTTP DEBUG: (#44,[::1]:54066) incoming connection opened 2024-11-21T23:18:15.629316Z node 7 :HTTP DEBUG: (#44,[::1]:54066) -> (POST /Root) 2024-11-21T23:18:15.629453Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [78f2:b700:6050:0:60f2:b700:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: ff116ae5-72e3624b-a365954d-ad5b2f3f 2024-11-21T23:18:15.629948Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [ff116ae5-72e3624b-a365954d-ad5b2f3f] got new request from [78f2:b700:6050:0:60f2:b700:6050:0] 2024-11-21T23:18:15.630372Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [ff116ae5-72e3624b-a365954d-ad5b2f3f] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:15.630411Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [ff116ae5-72e3624b-a365954d-ad5b2f3f] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:15.644886Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [ff116ae5-72e3624b-a365954d-ad5b2f3f] Got succesfult GRPC response. 2024-11-21T23:18:15.645008Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [ff116ae5-72e3624b-a365954d-ad5b2f3f] reply ok 2024-11-21T23:18:15.645179Z node 7 :HTTP DEBUG: (#44,[::1]:54066) <- (200 ) 2024-11-21T23:18:15.645295Z node 7 :HTTP DEBUG: (#44,[::1]:54066) connection closed Http output full {"Successful":[{"Id":"Id-0"}]} 2024-11-21T23:18:15.648963Z node 7 :HTTP DEBUG: (#47,[::1]:54078) incoming connection opened 2024-11-21T23:18:15.649043Z node 7 :HTTP DEBUG: (#47,[::1]:54078) -> (POST /Root) 2024-11-21T23:18:15.649217Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d869:4400:6050:0:c069:4400:6050:0] request [ChangeMessageVisibilityBatch] url [/Root] database [/Root] requestId: cff23b98-dc6eac3d-c183dfe9-5f3d8729 2024-11-21T23:18:15.649813Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cff23b98-dc6eac3d-c183dfe9-5f3d8729] got new request from [d869:4400:6050:0:c069:4400:6050:0] 2024-11-21T23:18:15.650190Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [cff23b98-dc6eac3d-c183dfe9-5f3d8729] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:15.650206Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cff23b98-dc6eac3d-c183dfe9-5f3d8729] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:15.670954Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [cff23b98-dc6eac3d-c183dfe9-5f3d8729] Got succesfult GRPC response. 
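The ChangeMessageVisibilityBatch requests above exercise the SQS-compatible JSON protocol of the YDB HTTP proxy: a POST to /Root with an X-Amz-Target header naming the action and a JSON body carrying QueueUrl and Entries, answered either with an SQS-style error code (AWS.SimpleQueueService.EmptyBatchRequest, InvalidArgumentException, MessageNotInflight, MissingParameter, ReceiptHandleIsInvalid) or with a Successful/Failed result list. A minimal sketch of reproducing such a request with the Python standard library is shown below; the host and port are assumptions, and the queue URL is the test fixture value copied from this log, not a real deployment.

    import json
    import http.client

    # Placeholder endpoint; the queue URL is the fixture value seen in the log above.
    PROXY_HOST, PROXY_PORT = "localhost", 8771
    QUEUE_URL = ("http://ghrun-uc25mmfz34.auto.internal:8771"
                 "/cloud4/000000000000000101v0/ExampleQueueName")

    def change_message_visibility_batch(entries):
        # POST an SQS-style ChangeMessageVisibilityBatch request to the proxy.
        body = json.dumps({"QueueUrl": QUEUE_URL, "Entries": entries})
        conn = http.client.HTTPConnection(PROXY_HOST, PROXY_PORT)
        conn.request("POST", "/Root", body=body, headers={
            "X-Amz-Target": "AmazonSQS.ChangeMessageVisibilityBatch",
            "Content-Type": "application/json",
        })
        resp = conn.getresponse()
        return resp.status, json.loads(resp.read() or b"{}")

    # An empty Entries list is rejected with 400 AWS.SimpleQueueService.EmptyBatchRequest,
    # as in the log; a valid entry carries Id, a string ReceiptHandle and VisibilityTimeout.
    print(change_message_visibility_batch([]))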
2024-11-21T23:18:15.671134Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [cff23b98-dc6eac3d-c183dfe9-5f3d8729] reply ok 2024-11-21T23:18:15.671412Z node 7 :HTTP DEBUG: (#47,[::1]:54078) <- (200 ) 2024-11-21T23:18:15.671566Z node 7 :HTTP DEBUG: (#47,[::1]:54078) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] >> KqpStats::StatsProfile [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2024-11-21T23:17:19.425727Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875663093427357:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:19.425934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002499/r3tmp/tmpBRafdk/pdisk_1.dat 2024-11-21T23:17:19.840619Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:19.862960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:19.863087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:19.864823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20023, node 1 2024-11-21T23:17:19.947276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:19.947304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:19.947312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:19.947440Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:17:20.274650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.281302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.281351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.281943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:20.282098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:20.282112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:20.282794Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:20.282829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:20.283176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.284929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040330, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.284968Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:20.285187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:20.285882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.285996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.286044Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:20.286132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:20.286178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:20.286217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:17:20.288029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:20.288065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:20.288079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:20.288153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T23:17:20.295007Z node 
1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:5675 2024-11-21T23:17:20.663520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.663711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.663746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.665667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:20.665813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.667195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040715, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.667229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231040715, at schemeshard: 72057594046644480 2024-11-21T23:17:20.667520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:20.667605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:20.667638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:17:20.668057Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.668219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.669791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:20.669835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:20.669854Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:20.669909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 waiting... 
2024-11-21T23:17:20.678475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:20.688805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.689070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.689093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.689164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:20.689271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:20.689287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:20.689811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:20.689940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.690206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.691316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:20.691369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:20.691388Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:20.691450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 2024-11-21T23:17:20.707491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.707746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.708422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECT ... 
server disconnected, pipe [7:7439875910783978484:2485] destroyed 2024-11-21T23:18:17.089526Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.089541Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [7:7439875910783978485:2486] destroyed 2024-11-21T23:18:17.089559Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.089572Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439875910783978486:2487] destroyed 2024-11-21T23:18:17.089609Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.089628Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439875910783978482:2483] destroyed 2024-11-21T23:18:17.089647Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.089666Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [7:7439875910783978483:2484] destroyed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732231097,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732231097,"StorageLimitMb":0,"StreamName":"testtopic"}} 2024-11-21T23:18:17.092149Z node 7 :HTTP DEBUG: (#47,[::1]:38444) incoming connection opened 2024-11-21T23:18:17.092223Z node 7 :HTTP DEBUG: (#47,[::1]:38444) -> (POST /Root) 2024-11-21T23:18:17.092356Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [3824:a500:6050:0:2024:a500:6050:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: 8a7a6568-30e7342d-a0e47f7a-a0b963bb 2024-11-21T23:18:17.092750Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [8a7a6568-30e7342d-a0e47f7a-a0b963bb] got new request from [3824:a500:6050:0:2024:a500:6050:0] database '/Root' stream 'testtopic' 2024-11-21T23:18:17.093119Z node 7 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [8a7a6568-30e7342d-a0e47f7a-a0b963bb] [auth] Authorized successfully 2024-11-21T23:18:17.093248Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [8a7a6568-30e7342d-a0e47f7a-a0b963bb] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:17.094633Z node 7 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [8a7a6568-30e7342d-a0e47f7a-a0b963bb] reply ok 2024-11-21T23:18:17.094946Z node 7 :HTTP DEBUG: (#47,[::1]:38444) <- (200 ) 2024-11-21T23:18:17.095038Z node 7 :HTTP DEBUG: (#47,[::1]:38444) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1732231.097,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1732231.097,"StreamName":"testtopic"}} 2024-11-21T23:18:17.096104Z node 7 :HTTP DEBUG: 
(#44,[::1]:38454) incoming connection opened 2024-11-21T23:18:17.096157Z node 7 :HTTP DEBUG: (#44,[::1]:38454) -> (POST /Root) 2024-11-21T23:18:17.096466Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [58e4:ee00:6050:0:40e4:ee00:6050:0] request [DescribeStream] url [/Root] database [/Root] requestId: 9dba7311-df917fa2-e5d17967-5c915d27 2024-11-21T23:18:17.096805Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [9dba7311-df917fa2-e5d17967-5c915d27] got new request from [58e4:ee00:6050:0:40e4:ee00:6050:0] database '/Root' stream 'testtopic' 2024-11-21T23:18:17.097303Z node 7 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [9dba7311-df917fa2-e5d17967-5c915d27] [auth] Authorized successfully 2024-11-21T23:18:17.097409Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [9dba7311-df917fa2-e5d17967-5c915d27] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:17.098857Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:17.098897Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [7:7439875910783978512:2498], now have 1 active actors on pipe 2024-11-21T23:18:17.098925Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:17.098941Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [7:7439875910783978513:2499], now have 1 active actors on pipe 2024-11-21T23:18:17.098980Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:17.099000Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [7:7439875910783978509:2495], now have 1 active actors on pipe 2024-11-21T23:18:17.099027Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:17.099046Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [7:7439875910783978510:2496], now have 1 active actors on pipe 2024-11-21T23:18:17.099074Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:18:17.099092Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [7:7439875910783978511:2497], now have 1 active actors on pipe 2024-11-21T23:18:17.100855Z node 7 :HTTP_PROXY INFO: http request [DescribeStream] requestId [9dba7311-df917fa2-e5d17967-5c915d27] reply ok 2024-11-21T23:18:17.101395Z node 7 :HTTP DEBUG: (#44,[::1]:38454) <- (200 ) 2024-11-21T23:18:17.101504Z node 7 :HTTP DEBUG: (#44,[::1]:38454) connection closed Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1732231097,"StorageLimitMb":0,"StreamName":"testtopic"}} 2024-11-21T23:18:17.103120Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.103147Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [7:7439875910783978509:2495] destroyed 2024-11-21T23:18:17.103171Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.103184Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [7:7439875910783978510:2496] destroyed 2024-11-21T23:18:17.103201Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.103215Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [7:7439875910783978511:2497] destroyed 2024-11-21T23:18:17.103236Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.103250Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [7:7439875910783978512:2498] destroyed 2024-11-21T23:18:17.103264Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:17.103276Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [7:7439875910783978513:2499] destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 2462, MsgBus: 1028 2024-11-21T23:17:35.243938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875733747148104:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:35.246699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f71/r3tmp/tmpdkvpUZ/pdisk_1.dat 2024-11-21T23:17:35.686563Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:35.710285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:35.710430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:35.713343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2462, node 1 2024-11-21T23:17:35.820901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:35.820929Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:35.820937Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:35.821048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1028 TClient is connected to server localhost:1028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:36.484144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.503060Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:36.520913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.672713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.830317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.910431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
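The DescribeStream responses earlier in this output list five shards whose HashKeyRange values split the stream's 128-bit hash key space into equal, contiguous ranges. The boundaries printed in the log can be reproduced with a short calculation; this is a sketch of the arithmetic only, not YDB's implementation.

    # Reproduce the shard hash key ranges reported by DescribeStream above.
    TOTAL = 1 << 128   # hash keys cover [0, 2^128 - 1]
    SHARDS = 5

    for i in range(SHARDS):
        start = TOTAL * i // SHARDS
        end = TOTAL * (i + 1) // SHARDS - 1
        print(f"shard-{i:06d}: {start} .. {end}")

    # shard-000000 ends at 68056473384187692692674921486353642290 and
    # shard-000004 ends at 340282366920938463463374607431768211455,
    # matching the EndingHashKey values in the log.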
2024-11-21T23:17:38.997163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875746632051554:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:38.997253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.252962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.297286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.331759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.371220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.410814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.470243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.573683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875750927019353:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.573828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.574181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875750927019358:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.579725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:39.616153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875750927019360:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:40.236205Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875733747148104:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:40.236264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:40.767919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.808197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.877567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62571, MsgBus: 17216 2024-11-21T23:17:43.027985Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875765065987745:2176];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f71/r3tmp/tmpxTDVnw/pdisk_1.dat 2024-11-21T23:17:43.075283Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:17:43.186976Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:43.223285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:43.223371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:43.224668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62571, node 2 2024-11-21T23:17:43.415108Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:43.415127Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:43.415141Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:43.415235Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17216 TClient is connected to server localhost:17216 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:17:43.892158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.915568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.007266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.222463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ... suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.437078Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.499630Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.582168Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.624710Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.691241Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.777553Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875852474062936:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.777713Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.778102Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875852474062941:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.782772Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:03.796779Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439875852474062943:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:04.007304Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439875835294191698:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:04.007390Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:05.370602Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:05.413843Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:18:05.550476Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25962, MsgBus: 22847 2024-11-21T23:18:07.742471Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875869366100898:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:07.742518Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f71/r3tmp/tmp32rmcO/pdisk_1.dat 2024-11-21T23:18:07.988488Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:08.008324Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:08.008420Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:08.010824Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25962, node 5 2024-11-21T23:18:08.119186Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:08.119213Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:08.119227Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:08.119367Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22847 TClient is connected to server localhost:22847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:08.837841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:08.862617Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:08.949359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:09.196672Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:09.317490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:12.742596Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439875869366100898:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:12.742679Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:12.927746Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875890840939078:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:12.927885Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:12.985384Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.051825Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.117436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.182868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.246865Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.367835Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:13.498383Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875895135906875:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:13.498531Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:13.502578Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875895135906880:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:13.515841Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:13.563923Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439875895135906882:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:15.570959Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.660868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.739890Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] Test command err: 2024-11-21T23:17:17.802246Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875653316001372:2257];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:17.802283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002513/r3tmp/tmppLvU2X/pdisk_1.dat 2024-11-21T23:17:18.280021Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:18.288579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:18.288746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:18.292528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4247, node 1 2024-11-21T23:17:18.404545Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:18.404572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:18.404580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:18.404681Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10674 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:18.731589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:18.738914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:18.740486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:18.741759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:18.741997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:18.742011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:18.742814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:18.742840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:18.743129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:18.745071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231038790, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:18.745111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:18.745458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:18.746173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:18.746354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:18.746438Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:18.746603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:18.746649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:18.746683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:17:18.753561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:18.753623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:18.753638Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:18.753720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T23:17:18.754229Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:10674 2024-11-21T23:17:18.950796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:18.951303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:18.951329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:18.952073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:18.952203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:18.954668Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:18.958128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231039007, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:18.958169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231039007, at schemeshard: 72057594046644480 2024-11-21T23:17:18.958440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:18.958555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:18.958596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 1 2024-11-21T23:17:18.959240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:18.959407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:18.960448Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:18.960481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:18.960518Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:18.960572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2024-11-21T23:17:18.963880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:18.964368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:18.964386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:18.964464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:18.964539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:18.964611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:18.965272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:18.965389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:18.965618Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:18.966855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: 
Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:18.966897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:18.967232Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:18.967296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 waiting... 2024-11-21T23:17:18.968738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:18.968964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:18.969581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operati ... 710708:0 progress is 1/1 2024-11-21T23:18:17.601754Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710708:0 2024-11-21T23:18:17.601794Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710708, publications: 2, subscribers: 1 2024-11-21T23:18:17.602326Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 28 Version: 8 PathOwnerId: 72057594046644480, cookie: 281474976710708 2024-11-21T23:18:17.602359Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710708 2024-11-21T23:18:17.602374Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710708, pathId: [OwnerId: 72057594046644480, LocalPathId: 28], version: 8 2024-11-21T23:18:17.602572Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710708 2024-11-21T23:18:17.602604Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710708 2024-11-21T23:18:17.602615Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710708, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 4 2024-11-21T23:18:17.602653Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710708, subscribers: 1 2024-11-21T23:18:17.605987Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS/cloud4/000000000000000301v0/v4, operationId: 281474976710709:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.606223Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710709:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:18:17.606995Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710709, database: /Root, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/SQS/cloud4/000000000000000301v0/v4 2024-11-21T23:18:17.607142Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046644480 2024-11-21T23:18:17.607349Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:17.607407Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710709:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:18:17.608302Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710709 2024-11-21T23:18:17.608334Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710709 2024-11-21T23:18:17.608352Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710709, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 5 2024-11-21T23:18:17.608550Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710709 2024-11-21T23:18:17.608571Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710709 2024-11-21T23:18:17.608582Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710709, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 3 2024-11-21T23:18:17.608919Z node 7 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710709, at schemeshard: 72057594046644480 2024-11-21T23:18:17.611622Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231097660, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:18:17.611673Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId#281474976710709:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1732231097660, at schemeshard: 72057594046644480 2024-11-21T23:18:17.611802Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710709:0 128 -> 240 2024-11-21T23:18:17.612376Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:18:17.612572Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:18:17.612620Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710709:0 ProgressState 2024-11-21T23:18:17.612705Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710709:0 progress is 1/1 2024-11-21T23:18:17.612744Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710709:0 2024-11-21T23:18:17.612784Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710709, publications: 2, subscribers: 1 2024-11-21T23:18:17.614332Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 31 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710709 2024-11-21T23:18:17.614997Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710709 2024-11-21T23:18:17.615023Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710709, pathId: [OwnerId: 72057594046644480, LocalPathId: 31], version: 6 2024-11-21T23:18:17.615231Z 
node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 32 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710709 2024-11-21T23:18:17.615255Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710709 2024-11-21T23:18:17.615269Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710709, pathId: [OwnerId: 72057594046644480, LocalPathId: 32], version: 4 2024-11-21T23:18:17.615312Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710709, subscribers: 1 2024-11-21T23:18:17.702835Z node 7 :HTTP_PROXY DEBUG: http request [CreateQueue] requestId [7b76a553-40d683d6-a36d76ab-5ebd6026] Got succesfult GRPC response. 2024-11-21T23:18:17.702909Z node 7 :HTTP_PROXY INFO: http request [CreateQueue] requestId [7b76a553-40d683d6-a36d76ab-5ebd6026] reply ok 2024-11-21T23:18:17.703048Z node 7 :HTTP DEBUG: (#44,[::1]:55110) <- (200 ) 2024-11-21T23:18:17.703138Z node 7 :HTTP DEBUG: (#44,[::1]:55110) connection closed Http output full {"QueueUrl":"http://ghrun-uc25mmfz34.auto.internal:8771/cloud4/000000000000000301v0/DlqName"} 2024-11-21T23:18:17.705306Z node 7 :HTTP DEBUG: (#44,[::1]:55126) incoming connection opened 2024-11-21T23:18:17.705472Z node 7 :HTTP DEBUG: (#44,[::1]:55126) -> (POST /Root) 2024-11-21T23:18:17.705628Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b855:d800:6050:0:a055:d800:6050:0] request [GetQueueAttributes] url [/Root] database [/Root] requestId: 5b96191f-4ca72fc0-f4e04c35-40c13234 2024-11-21T23:18:17.706244Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [5b96191f-4ca72fc0-f4e04c35-40c13234] got new request from [b855:d800:6050:0:a055:d800:6050:0] 2024-11-21T23:18:17.712452Z node 7 :HTTP_PROXY DEBUG: http request [GetQueueAttributes] requestId [5b96191f-4ca72fc0-f4e04c35-40c13234] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:17.712486Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [5b96191f-4ca72fc0-f4e04c35-40c13234] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:17.793758Z node 7 :HTTP_PROXY DEBUG: http request [GetQueueAttributes] requestId [5b96191f-4ca72fc0-f4e04c35-40c13234] Got succesfult GRPC response. 
2024-11-21T23:18:17.793869Z node 7 :HTTP_PROXY INFO: http request [GetQueueAttributes] requestId [5b96191f-4ca72fc0-f4e04c35-40c13234] reply ok 2024-11-21T23:18:17.794151Z node 7 :HTTP DEBUG: (#44,[::1]:55126) <- (200 ) 2024-11-21T23:18:17.794270Z node 7 :HTTP DEBUG: (#44,[::1]:55126) connection closed Http output full {"Attributes":{"QueueArn":"yrn:yc:ymq:ru-central1:folder4:DlqName"}} 2024-11-21T23:18:17.796377Z node 7 :HTTP DEBUG: (#47,[::1]:55132) incoming connection opened 2024-11-21T23:18:17.796445Z node 7 :HTTP DEBUG: (#47,[::1]:55132) -> (POST /Root) 2024-11-21T23:18:17.797549Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [f8b9:1f00:6050:0:e0b9:1f00:6050:0] request [SetQueueAttributes] url [/Root] database [/Root] requestId: 978a9dd8-abdc45e0-50176089-17a23487 2024-11-21T23:18:17.799451Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [978a9dd8-abdc45e0-50176089-17a23487] got new request from [f8b9:1f00:6050:0:e0b9:1f00:6050:0] 2024-11-21T23:18:17.800274Z node 7 :HTTP_PROXY DEBUG: http request [SetQueueAttributes] requestId [978a9dd8-abdc45e0-50176089-17a23487] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:17.800320Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [978a9dd8-abdc45e0-50176089-17a23487] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:17.861076Z node 7 :HTTP_PROXY DEBUG: http request [SetQueueAttributes] requestId [978a9dd8-abdc45e0-50176089-17a23487] Got succesfult GRPC response. 2024-11-21T23:18:17.861146Z node 7 :HTTP_PROXY INFO: http request [SetQueueAttributes] requestId [978a9dd8-abdc45e0-50176089-17a23487] reply ok Http output full {}2024-11-21T23:18:17.862102Z node 7 :HTTP DEBUG: (#47,[::1]:55132) <- (200 ) 2024-11-21T23:18:17.862201Z node 7 :HTTP DEBUG: (#47,[::1]:55132) connection closed 2024-11-21T23:18:17.863679Z node 7 :HTTP DEBUG: (#44,[::1]:55136) incoming connection opened 2024-11-21T23:18:17.863747Z node 7 :HTTP DEBUG: (#44,[::1]:55136) -> (POST /Root) 2024-11-21T23:18:17.863915Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [7871:ae00:6050:0:6071:ae00:6050:0] request [ListDeadLetterSourceQueues] url [/Root] database [/Root] requestId: 71c8fe36-49591c0e-8b2d1859-81822c84 2024-11-21T23:18:17.864342Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [71c8fe36-49591c0e-8b2d1859-81822c84] got new request from [7871:ae00:6050:0:6071:ae00:6050:0] 2024-11-21T23:18:17.864953Z node 7 :HTTP_PROXY DEBUG: http request [ListDeadLetterSourceQueues] requestId [71c8fe36-49591c0e-8b2d1859-81822c84] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:17.864990Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [71c8fe36-49591c0e-8b2d1859-81822c84] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:17.890272Z node 7 :HTTP_PROXY DEBUG: http request [ListDeadLetterSourceQueues] requestId [71c8fe36-49591c0e-8b2d1859-81822c84] Got succesfult GRPC response. 
2024-11-21T23:18:17.890413Z node 7 :HTTP_PROXY INFO: http request [ListDeadLetterSourceQueues] requestId [71c8fe36-49591c0e-8b2d1859-81822c84] reply ok 2024-11-21T23:18:17.890635Z node 7 :HTTP DEBUG: (#44,[::1]:55136) <- (200 ) 2024-11-21T23:18:17.890757Z node 7 :HTTP DEBUG: (#44,[::1]:55136) connection closed Http output full {"NextToken":"","QueueUrls":["http://ghrun-uc25mmfz34.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName"]} |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling >> KqpScanSpilling::SelfJoin >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::MultiUsedStage >> KqpScanSpilling::HandleErrorsCorrectly >> DstCreator::WithIntermediateDir >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] Test command err: Trying to start YDB, gRPC: 13106, MsgBus: 18727 2024-11-21T23:18:00.593636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875838570575499:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:00.593698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028fd/r3tmp/tmpB9dYBm/pdisk_1.dat 2024-11-21T23:18:01.008996Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:01.053724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:01.053835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13106, node 1 2024-11-21T23:18:01.056239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:01.117501Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:01.117521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:01.117527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:01.117604Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:18727 TClient is connected to server localhost:18727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:01.695664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:01.707770Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:01.718264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:01.894446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:02.073053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:02.146850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:04.196579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875855750446177:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.196688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.556791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.597462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.630315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.655618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.679639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.706449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.764822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875855750446675:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.764905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.765295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875855750446680:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.768779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:04.790502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875855750446682:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:05.594084Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875838570575499:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:05.594149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18567, MsgBus: 13823 2024-11-21T23:18:07.046784Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875868713248980:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:07.046830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028fd/r3tmp/tmpvH9pbL/pdisk_1.dat 2024-11-21T23:18:07.185471Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:07.219437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:07.219521Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:07.221250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18567, node 2 2024-11-21T23:18:07.294725Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:07.294748Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:07.294756Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:07.294852Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13823 TClient is connected to server localhost:13823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:07.732668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:07.738291Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:18:07.745616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:07.821533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:07.985262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:08.068941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.363090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875885893119872:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.363210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.410384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.475438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.527149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.615659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.668442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.705628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.806736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875885893120374:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.806831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.807046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875885893120379:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.810801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:11.822888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875885893120381:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:12.052859Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875868713248980:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:12.052922Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4351, MsgBus: 20069 2024-11-21T23:18:14.431731Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875899243363411:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:14.431778Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028fd/r3tmp/tmpHGMK0l/pdisk_1.dat 2024-11-21T23:18:14.670437Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:14.671065Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:14.671143Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:14.708801Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4351, node 3 2024-11-21T23:18:14.813749Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:14.813778Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:14.813786Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:14.813897Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20069 TClient is connected to server localhost:20069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:15.321446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:15.330675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:15.396767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:15.577701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:15.659239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:18.114564Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875916423234227:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:18.114672Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:18.204495Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.276834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.322102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.365087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.405908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.447943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.496194Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875916423234727:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:18.496267Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:18.496544Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875916423234732:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:18.500751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:18.515090Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875916423234734:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:19.519536Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875899243363411:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:19.544946Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index >> DstCreator::SameOwner >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::RowsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 62217, MsgBus: 15866 2024-11-21T23:17:59.056292Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875835934835669:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:59.056417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002902/r3tmp/tmpZW1hBX/pdisk_1.dat 2024-11-21T23:17:59.590106Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:59.594619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:59.594741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:59.598027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62217, node 1 2024-11-21T23:17:59.726688Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:59.726711Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:59.726718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:59.726823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15866 TClient is connected to server localhost:15866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:00.314276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.335219Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:00.356433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.507490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.676247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.748828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:02.621206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875848819739250:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:02.630222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:02.944932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.013341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.044467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.083988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.152187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.199459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.296516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875853114707051:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.296602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.296836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875853114707056:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.300219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:03.317290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875853114707058:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:04.070546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875835934835669:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:04.082602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17437, MsgBus: 22436 2024-11-21T23:18:05.472680Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875863132993234:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:05.472731Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002902/r3tmp/tmp2Eguxm/pdisk_1.dat 2024-11-21T23:18:05.589831Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:05.614692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:05.614764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:05.621586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17437, node 2 2024-11-21T23:18:05.705509Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:05.705530Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:05.705538Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:05.705643Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22436 TClient is connected to server localhost:22436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:06.232997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:06.246350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:06.331536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:06.539661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:06.631179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:09.170569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875880312864119:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... "Node Type":"Precompute_0_4","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key","Value"],"limit":"1001","type":"Scan"},{"columns":["Key","Value"],"limit":"1001","type":"Scan"},{"columns":["Value"],"scan_by":["Key (20, 120]"],"type":"Scan"},{"columns":["Value"],"scan_by":["Key [10, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [10, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_4","PlanNodeType":"ResultSet"},{"PlanNodeId":12,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Sequential","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_4","Name":"TableRangeScan","ReadLimit":"1001","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_0","PlanNodeType":"ResultSet"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":24,"Plans":[{"PlanNodeId":26,"Plans":[{"PlanNodeId":27,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (20, 120]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_2","PlanNodeType":"ResultSet"},{"PlanNodeId":28,"Plans":[{"PlanNodeId":32,"Plans":[{"PlanNodeId":33,"Plans":[{"PlanNodeId":35,"Plans":[{"PlanNodeId":37,"Plans":[{"PlanNodeId":38,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node 
Type":"ResultSet_1_3","PlanNodeType":"ResultSet"},{"PlanNodeId":39,"Plans":[{"PlanNodeId":40,"Plans":[{"PlanNodeId":42,"Plans":[{"PlanNodeId":43,"Operators":[{"Scan":"Sequential","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_3","Name":"TableRangeScan","ReadLimit":"1001","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 29246, MsgBus: 8824 2024-11-21T23:18:15.950911Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875903571915807:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002902/r3tmp/tmpQypok0/pdisk_1.dat 2024-11-21T23:18:16.079716Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:16.206786Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:16.225201Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:16.225323Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:16.227235Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29246, node 3 2024-11-21T23:18:16.383382Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:16.383409Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:16.383423Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:16.383575Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8824 TClient is connected to server localhost:8824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:18:17.178980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.188744Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:17.205174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.295799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.559342Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.662325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.739679Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875925046753821:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.739764Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.799372Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.864579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.872511Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875903571915807:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:20.872590Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:20.972034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:21.030423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:21.127417Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:21.240401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:21.309047Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875929341721623:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:21.309158Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:21.309409Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875929341721628:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:21.321400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:21.343040Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875929341721630:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:16:16.217845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:16:16.217950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:16.218002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:16:16.218040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:16:16.218091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:16:16.218125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:16:16.218197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:16:16.218564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:16:16.300081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:16:16.300140Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:16.312376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:16:16.315010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:16:16.315160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:16:16.336288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:16:16.338746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:16:16.339375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:16.339718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:16:16.357917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:16.359814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:16.359896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:16.360097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:16:16.360149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2024-11-21T23:16:16.360186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:16:16.360297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.366909Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:16:16.505224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:16:16.505479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.505691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:16:16.505891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:16:16.505955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.510555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:16.510707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:16:16.510916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.510988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:16:16.511019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:16:16.511046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:16:16.512953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.513014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:16:16.513051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:16:16.514624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.514666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.514715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:16.514786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:16:16.518237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:16:16.520266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:16:16.520467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:16:16.521418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:16:16.521541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:16:16.521613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:16.521862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:16:16.521914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:16:16.522089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:16.522159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:16:16.524700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:16:16.524750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:16:16.524919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:16:16.524968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:16:16.525306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:16:16.525365Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:16:16.525473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:16:16.525502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:16.525539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:16:16.525628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:16:16.525663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2024-11-21T23:16:16.525689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:16:16.525751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:16:16.525926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:16:16.525956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:16:16.527959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:16.528068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:16:16.528105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:16:16.528140Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:16:16.528175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:16:16.528268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 3.631535Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:18:23.631565Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T23:18:23.631606Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:18:23.633333Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:18:23.633444Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:18:23.633474Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:18:23.634146Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:18:23.634224Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:18:23.634254Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:18:23.635110Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 
2024-11-21T23:18:23.635195Z node 18 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:18:23.635637Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2024-11-21T23:18:23.635869Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2024-11-21T23:18:23.635942Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2024-11-21T23:18:23.636030Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2024-11-21T23:18:23.639461Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:18:23.639598Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2024-11-21T23:18:23.639631Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2024-11-21T23:18:23.639663Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2024-11-21T23:18:23.639696Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2024-11-21T23:18:23.639802Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2024-11-21T23:18:23.641081Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2024-11-21T23:18:23.641135Z node 18 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:18:23.641439Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:18:23.641575Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2024-11-21T23:18:23.641612Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2024-11-21T23:18:23.641660Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2024-11-21T23:18:23.641774Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:379:2344] message: TxId: 103 2024-11-21T23:18:23.641864Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2024-11-21T23:18:23.641947Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2024-11-21T23:18:23.642018Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2024-11-21T23:18:23.642206Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2024-11-21T23:18:23.642279Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2024-11-21T23:18:23.642302Z node 18 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2024-11-21T23:18:23.642338Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2024-11-21T23:18:23.642362Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2024-11-21T23:18:23.642387Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2024-11-21T23:18:23.642472Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2024-11-21T23:18:23.642498Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2024-11-21T23:18:23.642522Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2024-11-21T23:18:23.642552Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2024-11-21T23:18:23.642599Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4 2024-11-21T23:18:23.642627Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2024-11-21T23:18:23.642704Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2024-11-21T23:18:23.643413Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:18:23.643541Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2024-11-21T23:18:23.643678Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2024-11-21T23:18:23.643753Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2024-11-21T23:18:23.643816Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2024-11-21T23:18:23.644253Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:18:23.646408Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:18:23.646547Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:18:23.646594Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:18:23.646636Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:18:23.649457Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2024-11-21T23:18:23.649700Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2024-11-21T23:18:23.649786Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [18:763:2656] 2024-11-21T23:18:23.650166Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2024-11-21T23:18:23.650983Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:18:23.651359Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 434us result status StatusPathDoesNotExist 2024-11-21T23:18:23.651663Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2024-11-21T23:18:23.652418Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2024-11-21T23:18:23.652769Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 373us result status StatusPathDoesNotExist 2024-11-21T23:18:23.652984Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpFlipJoin::RightOnly_3 [GOOD] >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> DstCreator::KeyColumnsSizeMismatch >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] |86.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoin::RightTableValuePredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 4428, MsgBus: 65153 2024-11-21T23:17:31.333967Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875715111440533:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:31.334024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f7d/r3tmp/tmpIn020h/pdisk_1.dat 2024-11-21T23:17:31.739885Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:31.783717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:31.783827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:31.788693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4428, node 1 2024-11-21T23:17:31.892932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:31.892958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:31.892964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:31.893040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65153 TClient is connected to server localhost:65153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:32.492594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:32.505665Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:32.521217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:32.654852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:32.790145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:32.888775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:34.726472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875727996343922:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:34.726583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:35.017655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:35.061158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:35.093027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:35.128712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:35.162639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:35.208221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:35.305604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875732291311718:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:35.305711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:35.305954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875732291311723:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:35.311988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:35.326414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875732291311725:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:36.335783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875715111440533:2253];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:36.335862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:36.530524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:36.571412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:36.618762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:17:36.666135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2786, MsgBus: 22466 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f7d/r3tmp/tmpnyCxbQ/pdisk_1.dat 2024-11-21T23:17:38.789588Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:38.814870Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:38.836395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:38.836494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:38.838824Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2786, node 2 2024-11-21T23:17:38.951044Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:38.951069Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:38.951078Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:38.951202Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22466 TClient is connected to server localhost:22466 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:39.515448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.524584Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:17:39.535452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:17:39.639481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, sub ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:08.759356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:08.818115Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:08.904292Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:08.985567Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:09.075053Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875879191632856:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:09.075179Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:09.075561Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875879191632861:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:09.082992Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:09.100106Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439875879191632863:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:11.228184Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.275826Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.324978Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.410224Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63081, MsgBus: 23969 2024-11-21T23:18:14.691261Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439875900299628066:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:14.691936Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f7d/r3tmp/tmpBKZaDc/pdisk_1.dat 2024-11-21T23:18:14.941363Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:14.974467Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:14.974584Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:14.977392Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63081, node 6 2024-11-21T23:18:15.057356Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:15.057385Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:15.057394Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:15.057537Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23969 TClient is connected to server localhost:23969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:18:15.830198Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:15.841266Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:15.853185Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:16.005309Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:16.391792Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:16.519719Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:19.691929Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439875900299628066:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:19.692064Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:19.797244Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875921774466246:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.806787Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.840810Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.907219Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.967329Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.014567Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.062977Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.149036Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.263997Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875926069434047:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.264131Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.264442Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875926069434052:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.271222Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:20.304888Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439875926069434054:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:22.883441Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.973657Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.061844Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.166914Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2024-11-21T23:17:19.007967Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875663344563234:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:19.008031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0024a3/r3tmp/tmprtuPyo/pdisk_1.dat 2024-11-21T23:17:19.508047Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:19.516467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:19.516579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:19.519039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1566, node 1 2024-11-21T23:17:19.655125Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:19.655166Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:19.655172Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:19.655261Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1203 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:19.947828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.956456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:19.962363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:17:19.963744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2024-11-21T23:17:19.963936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046644480 2024-11-21T23:17:19.963952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2024-11-21T23:17:19.964691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2024-11-21T23:17:19.964716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2024-11-21T23:17:19.965113Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:19.967200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040015, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:19.967250Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet 72057594046644480 2024-11-21T23:17:19.967604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2024-11-21T23:17:19.972353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:19.972515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:19.972583Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2024-11-21T23:17:19.972659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2024-11-21T23:17:19.972688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2024-11-21T23:17:19.972721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2024-11-21T23:17:19.975697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2024-11-21T23:17:19.975732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2024-11-21T23:17:19.975746Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2024-11-21T23:17:19.975927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2024-11-21T23:17:19.976106Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:1203 2024-11-21T23:17:20.211252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: //Root, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.211486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.211509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.212047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: //Root 2024-11-21T23:17:20.212188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.213582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1732231040260, transactions count in step: 1, at schemeshard: 72057594046644480 2024-11-21T23:17:20.213606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 281474976710658:0, stepId:1732231040260, at schemeshard: 72057594046644480 2024-11-21T23:17:20.213829Z node 
1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710658:0 progress is 1/1 2024-11-21T23:17:20.213893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2024-11-21T23:17:20.213925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2024-11-21T23:17:20.214347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:20.216809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.217522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2024-11-21T23:17:20.217589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710658 2024-11-21T23:17:20.217608Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2024-11-21T23:17:20.217682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 0 2024-11-21T23:17:20.219497Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:17:20.227363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.227674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.227695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.227778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710659:0 progress is 1/1 2024-11-21T23:17:20.227893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2024-11-21T23:17:20.227907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 0 2024-11-21T23:17:20.228472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, subject: , status: StatusSuccess, operation: MODIFY ACL, path: //Root, add access: +F:Service1_id@as, add access: +F:proxy_sa@as 2024-11-21T23:17:20.228582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2024-11-21T23:17:20.228805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2024-11-21T23:17:20.229711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710659 2024-11-21T23:17:20.229744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710659 2024-11-21T23:17:20.229768Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 
281474976710659, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2024-11-21T23:17:20.229825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 0 waiting... 2024-11-21T23:17:20.234049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/SQS, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:20.234254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2024-11-21T23:17:20.234992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: CRE ... request [DeleteMessageBatch] requestId [55256e42-f5917cfd-a135f795-c5feaa2] got new request with incorrect json from [58de:1f00:6050:0:40de:1f00:6050:0] 2024-11-21T23:18:24.465674Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [55256e42-f5917cfd-a135f795-c5feaa2] reply with status: BAD_REQUEST message: LogMessageFatal exception 2024-11-21T23:18:24.466576Z node 7 :HTTP DEBUG: (#47,[::1]:60594) <- (400 InvalidArgumentException) 2024-11-21T23:18:24.466626Z node 7 :HTTP DEBUG: (#47,[::1]:60594) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.DeleteMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 84 { "QueueUrl":"http://ghrun-uc25mmfz34.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries": { } } 0 2024-11-21T23:18:24.466663Z node 7 :HTTP DEBUG: (#47,[::1]:60594) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 55256e42-f5917cfd-a135f795-c5feaa2 x-amz-crc32: 3518254967 Content-Type: application/x-amz-json-1.1 Content-Length: 75 {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T23:18:24.466748Z node 7 :HTTP DEBUG: (#47,[::1]:60594) connection closed Http output full {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} F0000 00:00:1732231104.468516 339656 generated_message_reflection.cc:181] F0000 00:00:1732231104.468516 339656 generated_message_reflection.cc:181] Http output full {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T23:18:24.467940Z node 7 :HTTP DEBUG: (#45,[::1]:60598) incoming connection opened 2024-11-21T23:18:24.468004Z node 7 :HTTP DEBUG: (#45,[::1]:60598) -> (POST /Root) 2024-11-21T23:18:24.468147Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [381d:1c00:6050:0:201d:1c00:6050:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: dd2457b9-c9916d3d-f98a7c84-59132e18 2024-11-21T23:18:24.468833Z node 7 :HTTP_PROXY WARN: http request [DeleteMessageBatch] requestId [dd2457b9-c9916d3d-f98a7c84-59132e18] got new request with incorrect json from [381d:1c00:6050:0:201d:1c00:6050:0] 2024-11-21T23:18:24.468855Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [dd2457b9-c9916d3d-f98a7c84-59132e18] reply with status: BAD_REQUEST message: LogMessageFatal exception 2024-11-21T23:18:24.468977Z node 7 :HTTP DEBUG: (#45,[::1]:60598) <- (400 InvalidArgumentException) 2024-11-21T23:18:24.469018Z node 7 :HTTP DEBUG: (#45,[::1]:60598) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.DeleteMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: 
application/json Connection: Close Transfer-Encoding: chunked 7a { "QueueUrl":"http://ghrun-uc25mmfz34.auto.internal:8771/cloud4/000000000000000101v0/ExampleQueueName", "Entries":"" } 0 2024-11-21T23:18:24.469055Z node 7 :HTTP DEBUG: (#45,[::1]:60598) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: dd2457b9-c9916d3d-f98a7c84-59132e18 x-amz-crc32: 3518254967 Content-Type: application/x-amz-json-1.1 Content-Length: 75 {"__type":"InvalidArgumentException","message":"LogMessageFatal exception"} 2024-11-21T23:18:24.469112Z node 7 :HTTP DEBUG: (#45,[::1]:60598) connection closed 2024-11-21T23:18:24.470466Z node 7 :HTTP DEBUG: (#47,[::1]:60608) incoming connection opened 2024-11-21T23:18:24.470532Z node 7 :HTTP DEBUG: (#47,[::1]:60608) -> (POST /Root) 2024-11-21T23:18:24.470629Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d89a:d00:6050:0:c09a:d00:6050:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: d148addc-96cb1e5f-eebbc860-fc3481b6 2024-11-21T23:18:24.471277Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [d148addc-96cb1e5f-eebbc860-fc3481b6] got new request from [d89a:d00:6050:0:c09a:d00:6050:0] 2024-11-21T23:18:24.471811Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [d148addc-96cb1e5f-eebbc860-fc3481b6] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:24.471841Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [d148addc-96cb1e5f-eebbc860-fc3481b6] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:24.541043Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [d148addc-96cb1e5f-eebbc860-fc3481b6] Got succesfult GRPC response. 2024-11-21T23:18:24.541301Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [d148addc-96cb1e5f-eebbc860-fc3481b6] reply ok 2024-11-21T23:18:24.541681Z node 7 :HTTP DEBUG: (#47,[::1]:60608) <- (200 ) 2024-11-21T23:18:24.541797Z node 7 :HTTP DEBUG: (#47,[::1]:60608) connection closed Http output full {"Successful":[{"SequenceNumber":"0","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MessageId":"ca5c0832-76d8de98-a002aa19-cb20e4ee"},{"SequenceNumber":"0","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"c1791fda-43c4e55-c788af9d-81993fba"}]} 2024-11-21T23:18:24.547823Z node 7 :HTTP DEBUG: (#45,[::1]:60618) incoming connection opened 2024-11-21T23:18:24.547889Z node 7 :HTTP DEBUG: (#45,[::1]:60618) -> (POST /Root) 2024-11-21T23:18:24.548344Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [580f:6a00:6050:0:400f:6a00:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 6ef35e1a-1cb60f9c-a12bc8a1-b988da91 2024-11-21T23:18:24.548749Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [6ef35e1a-1cb60f9c-a12bc8a1-b988da91] got new request from [580f:6a00:6050:0:400f:6a00:6050:0] 2024-11-21T23:18:24.549181Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [6ef35e1a-1cb60f9c-a12bc8a1-b988da91] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:24.549200Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [6ef35e1a-1cb60f9c-a12bc8a1-b988da91] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:24.654269Z node 7 :SQS WARN: Mark infly [cloud4/000000000000000101v0/2] for reloading. 
Reason: MessageDeleted 2024-11-21T23:18:24.662797Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [6ef35e1a-1cb60f9c-a12bc8a1-b988da91] Got succesfult GRPC response. 2024-11-21T23:18:24.663063Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [6ef35e1a-1cb60f9c-a12bc8a1-b988da91] reply ok 2024-11-21T23:18:24.663344Z node 7 :HTTP DEBUG: (#45,[::1]:60618) <- (200 ) 2024-11-21T23:18:24.663468Z node 7 :HTTP DEBUG: (#45,[::1]:60618) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SenderId":"fake_user_sid@as","SentTimestamp":"1732231104475","ApproximateFirstReceiveTimestamp":"1732231104613","ApproximateReceiveCount":"1"},"ReceiptHandle":"EAEg5ZCUiLUyKAI","Body":"MessageBody-0","MessageId":"ca5c0832-76d8de98-a002aa19-cb20e4ee"}]} 2024-11-21T23:18:24.674708Z node 7 :HTTP DEBUG: (#47,[::1]:60620) incoming connection opened 2024-11-21T23:18:24.674817Z node 7 :HTTP DEBUG: (#47,[::1]:60620) -> (POST /Root) 2024-11-21T23:18:24.675033Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [d818:2300:6050:0:c018:2300:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 2afbfbe8-7014bb2d-df9ea75f-af27ed3e 2024-11-21T23:18:24.675641Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [2afbfbe8-7014bb2d-df9ea75f-af27ed3e] got new request from [d818:2300:6050:0:c018:2300:6050:0] 2024-11-21T23:18:24.676071Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [2afbfbe8-7014bb2d-df9ea75f-af27ed3e] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:24.676095Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [2afbfbe8-7014bb2d-df9ea75f-af27ed3e] sending grpc request to '' database: '/Root' iam token size: 0 2024-11-21T23:18:24.730494Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [2afbfbe8-7014bb2d-df9ea75f-af27ed3e] Got succesfult GRPC response. 2024-11-21T23:18:24.730729Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [2afbfbe8-7014bb2d-df9ea75f-af27ed3e] reply ok 2024-11-21T23:18:24.730966Z node 7 :HTTP DEBUG: (#47,[::1]:60620) <- (200 ) 2024-11-21T23:18:24.731055Z node 7 :HTTP DEBUG: (#47,[::1]:60620) connection closed Http output full {"Messages":[{"MD5OfBody":"3bf7e6d806a0b8062135ae945eca30bf","Attributes":{"SenderId":"fake_user_sid@as","SentTimestamp":"1732231104475","ApproximateFirstReceiveTimestamp":"1732231104677","ApproximateReceiveCount":"1"},"ReceiptHandle":"EAIgpZGUiLUyKAI","Body":"MessageBody-1","MessageId":"c1791fda-43c4e55-c788af9d-81993fba"}]} 2024-11-21T23:18:24.732553Z node 7 :HTTP DEBUG: (#45,[::1]:60636) incoming connection opened 2024-11-21T23:18:24.732632Z node 7 :HTTP DEBUG: (#45,[::1]:60636) -> (POST /Root) 2024-11-21T23:18:24.732759Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [9880:e800:6050:0:8080:e800:6050:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: 6a7283db-1aad5899-5a8620da-67a877fc 2024-11-21T23:18:24.733331Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [6a7283db-1aad5899-5a8620da-67a877fc] got new request from [9880:e800:6050:0:8080:e800:6050:0] 2024-11-21T23:18:24.733667Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [6a7283db-1aad5899-5a8620da-67a877fc] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:24.733688Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [6a7283db-1aad5899-5a8620da-67a877fc] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2024-11-21T23:18:24.780566Z node 7 :HTTP_PROXY DEBUG: http request [DeleteMessageBatch] requestId [6a7283db-1aad5899-5a8620da-67a877fc] Got succesfult GRPC response. 2024-11-21T23:18:24.780750Z node 7 :HTTP_PROXY INFO: http request [DeleteMessageBatch] requestId [6a7283db-1aad5899-5a8620da-67a877fc] reply ok 2024-11-21T23:18:24.780908Z node 7 :HTTP DEBUG: (#45,[::1]:60636) <- (200 ) 2024-11-21T23:18:24.781018Z node 7 :HTTP DEBUG: (#45,[::1]:60636) connection closed 2024-11-21T23:18:24.782213Z node 7 :HTTP DEBUG: (#47,[::1]:60638) incoming connection opened 2024-11-21T23:18:24.782290Z node 7 :HTTP DEBUG: (#47,[::1]:60638) -> (POST /Root) 2024-11-21T23:18:24.782435Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [f815:3300:6050:0:e015:3300:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 8885b9ae-5c7495b6-2118f076-70820b57 2024-11-21T23:18:24.782853Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [8885b9ae-5c7495b6-2118f076-70820b57] got new request from [f815:3300:6050:0:e015:3300:6050:0] 2024-11-21T23:18:24.783293Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [8885b9ae-5c7495b6-2118f076-70820b57] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2024-11-21T23:18:24.783313Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [8885b9ae-5c7495b6-2118f076-70820b57] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {} 2024-11-21T23:18:24.787876Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [8885b9ae-5c7495b6-2118f076-70820b57] Got succesfult GRPC response. 
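The raw HTTP dumps above show the YMQ proxy's SQS-compatible JSON protocol: a POST whose X-Amz-Target header names the action and whose body is JSON, with malformed Entries (an object or a string) rejected as InvalidArgumentException and a well-formed batch answered with {"Successful":[...]}. The sketch below reproduces that kind of request with Python requests; it is an illustration, not test code. The endpoint and queue URL are placeholders modelled on the captured request (the test's real host and port differ), the receipt handles are the ones returned by ReceiveMessage in the log, and authentication is omitted because the test runs against a mocked cloud auth (fake_user_sid@as).

    import requests

    # Placeholders modelled on the captured request; the test's proxy listens elsewhere.
    ENDPOINT = "http://localhost:8771/Root"
    QUEUE_URL = "http://localhost:8771/cloud4/000000000000000101v0/ExampleQueueName"

    def delete_message_batch(entries):
        """entries must be a list of {"Id": ..., "ReceiptHandle": ...} dicts;
        sending a dict or a string instead reproduces the InvalidArgumentException
        ("LogMessageFatal exception") responses seen above."""
        resp = requests.post(
            ENDPOINT,
            headers={
                "X-Amz-Target": "AmazonSQS.DeleteMessageBatch",
                "Content-Type": "application/json",
            },
            json={"QueueUrl": QUEUE_URL, "Entries": entries},
        )
        return resp.status_code, resp.json()

    # Usage mirroring the successful batch in the log:
    status, body = delete_message_batch([
        {"Id": "Id-0", "ReceiptHandle": "EAEg5ZCUiLUyKAI"},
        {"Id": "Id-1", "ReceiptHandle": "EAIgpZGUiLUyKAI"},
    ])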
2024-11-21T23:18:24.787941Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [8885b9ae-5c7495b6-2118f076-70820b57] reply ok 2024-11-21T23:18:24.788084Z node 7 :HTTP DEBUG: (#47,[::1]:60638) <- (200 ) 2024-11-21T23:18:24.788173Z node 7 :HTTP DEBUG: (#47,[::1]:60638) connection closed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 26314, MsgBus: 24716 2024-11-21T23:17:38.097456Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875747082220754:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:38.097639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f62/r3tmp/tmpPT26k8/pdisk_1.dat 2024-11-21T23:17:38.827810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:38.830735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:38.838639Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:38.858003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26314, node 1 2024-11-21T23:17:38.942901Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:38.942934Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:38.942945Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:38.943044Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24716 TClient is connected to server localhost:24716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:39.501355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.533938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:17:39.639677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.797098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.859373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:41.509824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875759967124218:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:41.509913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:41.753031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.839004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.870285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.908972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.956863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.060009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.141805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875764262092016:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.141904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.146884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875764262092021:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.150685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:42.163696Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:17:42.164072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875764262092023:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:43.103574Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875747082220754:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:43.103637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:43.408323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.447340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.488884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.550627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.629289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.676770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26656, MsgBus: 13677 2024-11-21T23:17:45.502875Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875775355245968:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f62/r3tmp/tmpvMbQ5u/pdisk_1.dat 2024-11-21T23:17:45.602189Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:17:45.730190Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26656, node 2 2024-11-21T23:17:45.798995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:45.799091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:45.800691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:45.805662Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:45.805677Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:45.805682Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:45.805763Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13677 TClient is connected to server localhost:13677 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:46.324379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... 24-11-21T23:18:04.581869Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.625185Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.658342Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.702866Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:04.803506Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875859041142739:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.803619Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.804016Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875859041142744:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:04.808262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:04.823746Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439875859041142746:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:05.743656Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439875841861271493:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:05.743740Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:06.289506Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:06.347000Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:18:06.389660Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:18:06.438557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:18:06.467596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T23:18:06.499847Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 65015, MsgBus: 26151 2024-11-21T23:18:09.545054Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875877689366984:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f62/r3tmp/tmp6pnZre/pdisk_1.dat 2024-11-21T23:18:09.603806Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:09.693128Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:09.757399Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:09.757507Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:09.761093Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65015, node 5 2024-11-21T23:18:09.907849Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:09.907876Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:09.907887Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:09.908005Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26151 TClient is connected to server localhost:26151 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:11.026675Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.038933Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:11.061269Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.209975Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.444717Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.639379Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:14.502119Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439875877689366984:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:14.502220Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:15.073763Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875903459172317:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.074087Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.110971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.171313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.259252Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.319909Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.384671Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.456001Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.551287Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875903459172810:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.551413Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.551706Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875903459172815:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.557664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:15.589924Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439875903459172817:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:24.675138Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:24.675182Z node 5 :IMPORT WARN: Table profiles were not loaded |86.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> DstCreator::WithIntermediateDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableValuePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 22332, MsgBus: 11270 2024-11-21T23:17:37.927577Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875741902009733:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:37.927635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f63/r3tmp/tmp5snsJn/pdisk_1.dat 2024-11-21T23:17:38.692951Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:38.738222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:38.738335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:38.741031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22332, node 1 2024-11-21T23:17:38.845126Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:38.845148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:38.845155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:38.845257Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11270 TClient is connected to server localhost:11270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:39.478549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
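The WaitRootIsUp 'Root' lines in these test logs come from the C++ TClient polling the scheme root until it answers Ls. Outside the test harness the same readiness check can be expressed by describing the database root and retrying until it resolves. The sketch below is an assumption-laden illustration using the ydb Python SDK (Driver, driver.wait, scheme_client.describe_path and ydb.Error are that SDK's names as I understand them, not anything the C++ test uses); endpoint and database are placeholders.

    import time
    import ydb  # assumption: the ydb Python SDK is available

    def wait_root_is_up(endpoint="grpc://localhost:2136", database="/Root",
                        attempts=30, delay=1.0):
        """Poll the scheme root until it can be described, similar in spirit
        to the TClient WaitRootIsUp loop in the log."""
        driver = ydb.Driver(endpoint=endpoint, database=database)
        try:
            driver.wait(timeout=10)                    # wait for a usable gRPC channel
            for _ in range(attempts):
                try:
                    return driver.scheme_client.describe_path(database)  # root is visible
                except ydb.Error:
                    time.sleep(delay)                  # root not published yet, retry
            raise TimeoutError(f"{database} did not become available")
        finally:
            driver.stop()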
2024-11-21T23:17:39.495563Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:39.503711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.680074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:17:39.880422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.961106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:41.810010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875759081880636:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:41.810138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.063982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.103924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.167069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.271958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.372254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.460610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.536907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875763376848436:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.536998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.537275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875763376848441:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.541381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:42.565957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875763376848443:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:42.940840Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875741902009733:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:42.940902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:43.994286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.032457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.062213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.145803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.180560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6814, MsgBus: 22651 2024-11-21T23:17:47.706593Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875784223518089:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:47.730711Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f63/r3tmp/tmpxT4IYp/pdisk_1.dat 2024-11-21T23:17:47.892765Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6814, node 2 2024-11-21T23:17:48.051404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:48.051528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:48.055594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:48.082985Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:48.083010Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:48.083017Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:48.083156Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22651 TClient is connected to server localhost:22651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:48.720419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:48.727175Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, t ... 144977Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.466833Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875885319063240:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.466962Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.528015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.569622Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439875863844225075:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:11.569700Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:11.616297Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.677229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.720206Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.814977Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.891510Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:11.996142Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875885319063748:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.996248Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:11.996658Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439875885319063753:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:12.001210Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:12.014717Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439875885319063755:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:14.059716Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24614, MsgBus: 4978 2024-11-21T23:18:17.191000Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439875911994309029:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:17.191053Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f63/r3tmp/tmpz5CVk6/pdisk_1.dat 2024-11-21T23:18:17.384363Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:17.402121Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:17.402245Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:17.403951Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24614, node 5 2024-11-21T23:18:17.505168Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:17.505194Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:17.505203Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:17.505330Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4978 TClient is connected to server localhost:4978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:18.364712Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:18.383616Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:18.470247Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:18.730811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:18.860239Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:21.837987Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875929174179908:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:21.838098Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:21.909167Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:21.990062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.073361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.165307Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.198527Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439875911994309029:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:22.198655Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:22.247594Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.460783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.598149Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875933469147722:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:22.598276Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:22.598511Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875933469147727:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:22.605464Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:22.621685Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439875933469147729:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:24.787238Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> DstCreator::SameOwner [GOOD] >> DstCreator::NonExistentSrc >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction >> TColumnShardTestSchema::HotTiers [GOOD] >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> KqpStats::DeferredEffects [GOOD] >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> KqpLimits::ReplySizeExceeded [GOOD] >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> KqpExplain::MultiUsedStage [GOOD] >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] >> KqpExplain::MultiJoinCteLinks >> KqpStats::DataQueryWithEffects >> DstCreator::KeyColumnNameMismatch >> KqpScanSpilling::SelfJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 1458, MsgBus: 6882 2024-11-21T23:17:36.569603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875737457951802:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:36.569682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f68/r3tmp/tmpoFTR5H/pdisk_1.dat 2024-11-21T23:17:37.030848Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:37.032729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:37.032835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:37.037497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1458, node 1 2024-11-21T23:17:37.142059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:37.142083Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:37.142093Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:37.142289Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6882 TClient is connected to server localhost:6882 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:37.740607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:37.764192Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:37.769596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:37.958098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:38.208574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:38.410042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:40.193441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875754637822477:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:40.193553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:40.437327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.485085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.556263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.625112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.652647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.730018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.809835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875754637822986:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:40.809922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:40.810128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875754637822991:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:40.819148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:40.831417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875754637822994:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:41.570576Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875737457951802:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:41.570642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:42.101688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.135119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.205409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.235586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.263458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.292981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61664, MsgBus: 24539 2024-11-21T23:17:44.049873Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875769046350573:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:44.049905Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f68/r3tmp/tmpHMdPVo/pdisk_1.dat 2024-11-21T23:17:44.144435Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:44.156591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:44.156673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:44.157816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61664, node 2 2024-11-21T23:17:44.280631Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:44.280668Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:44.280676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:44.280785Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24539 TClient is connected to server localhost:24539 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:17:44.745991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself i ... t; 2024-11-21T23:18:13.993413Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875897136744928:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:13.993530Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.053198Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.155613Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.208318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.271038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.344376Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.415680Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.509909Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875901431712726:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.510006Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.511153Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875901431712731:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.516505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:14.531561Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439875901431712733:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:16.296013Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:16.344173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5647, MsgBus: 27906 2024-11-21T23:18:18.646309Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439875915895751833:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:18.646401Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f68/r3tmp/tmpbDoce4/pdisk_1.dat 2024-11-21T23:18:19.054818Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:19.058559Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:19.058675Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:19.060382Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5647, node 6 2024-11-21T23:18:19.295104Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:19.295135Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:19.295143Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:19.295285Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27906 TClient is connected to server localhost:27906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T23:18:20.202895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.219413Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:20.233379Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.353184Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.726015Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.873633Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:23.649624Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439875915895751833:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:23.649710Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:25.169388Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875945960524605:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:25.169499Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:25.210262Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:25.259678Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:25.330888Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:25.387999Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:25.476056Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:25.525337Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:25.682718Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875945960525128:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:25.682825Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:25.683320Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875945960525133:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:25.689844Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:25.723861Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439875945960525135:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:28.239996Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.291028Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithIntermediateDir [GOOD] Test command err: 2024-11-21T23:18:24.210652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875942382218397:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:24.210735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001570/r3tmp/tmpLPDtEj/pdisk_1.dat 2024-11-21T23:18:24.913737Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:24.953268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:24.953352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:24.956244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14417 TServer::EnableGrpc on GrpcPort 20370, node 1 2024-11-21T23:18:25.534950Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:25.534978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:25.534984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:25.535066Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:26.030496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:26.057971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231106186 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231106095 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231106186 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2024-11-21T23:18:26.275021Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:26.275154Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:26.275167Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:26.275944Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:28.713103Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231106186, tx_id: 281474976710658 } } } 2024-11-21T23:18:28.713547Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:28.715960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2024-11-21T23:18:28.718217Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2024-11-21T23:18:28.718235Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2024-11-21T23:18:28.759196Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 TClient::Ls request: /Root/Dir/Replicated 2024-11-21T23:18:28.759237Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 4] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231108797 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: 
"Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) >> BasicUsage::PropagateSessionClosed >> BasicUsage::FallbackToSingleDb >> KqpQuery::RowsLimit [GOOD] >> KqpTypes::SelectNull [GOOD] >> BasicUsage::GetAllStartPartitionSessions |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> DstCreator::NonExistentSrc [GOOD] |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> BasicUsage::RetryDiscoveryWithCancel >> BasicUsage::WriteSessionWriteInHandlers >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> KqpTypes::MultipleCurrentUtcTimestamp |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> KqpLimits::AffectedShardsLimit |86.7%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |86.7%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2024-11-21T23:14:36.893401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:36.893468Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:36.968942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:38.152205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:38.152284Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:38.214761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:39.726599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:39.726682Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:39.770521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:42.691230Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:42.691302Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:42.739541Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:45.993853Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:45.994078Z node 5 :IMPORT WARN: Table profiles were not loaded 
2024-11-21T23:14:46.043910Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:47.936397Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:47.936470Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:48.005647Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:48.925645Z node 6 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 7 2024-11-21T23:14:48.925818Z node 6 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 7 2024-11-21T23:14:48.925904Z node 6 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 7 2024-11-21T23:14:48.926926Z node 7 :TX_PROXY WARN: actor# [7:339:2085] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2024-11-21T23:14:50.320287Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:50.320378Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:50.362895Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:51.972632Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:51.972717Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:52.096312Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:53.812279Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:53.812358Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:53.875996Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:55.146497Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2024-11-21T23:14:55.355728Z node 11 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:55.356183Z node 11 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/00239e/r3tmp/tmpQHRAD7/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:55.356741Z node 11 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/00239e/r3tmp/tmpQHRAD7/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/00239e/r3tmp/tmpQHRAD7/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4857716340601784367 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:55.359461Z node 11 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/00239e/r3tmp/tmpQHRAD7/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2024-11-21T23:14:55.428517Z node 12 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:918} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2024-11-21T23:14:55.429023Z node 12 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2629} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/dl5p/00239e/r3tmp/tmpQHRAD7/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2024-11-21T23:14:55.429243Z node 12 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:281} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/dl5p/00239e/r3tmp/tmpQHRAD7/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/dl5p/00239e/r3tmp/tmpQHRAD7/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2143969260223262945 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1} PDiskId# 1000 2024-11-21T23:14:55.432111Z node 12 :BS_LOCALRECOVERY CRIT: VDISK[80000001:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 
0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 Hando ... 00-s[16/16]o)]} 2024-11-21T23:17:35.038326Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:35.038618Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:39.222888Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:39.223135Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:43.597568Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:43.597822Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:47.868466Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:47.868727Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:47.994228Z 
node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:47.994752Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:52.059943Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:52.060455Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:56.530814Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:17:56.531059Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:00.912032Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:00.912343Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:05.163848Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit 
error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:05.164202Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:09.718731Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:09.719063Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:14.569737Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:14.570023Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:19.140567Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:19.141298Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:24.095595Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:24.095940Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:28.798107Z node 100 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} 2024-11-21T23:18:28.798364Z node 100 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(100:1-s[16/16])(101:1000-s[16/16]o)(102:1000-s[16/16]o)(103:1000-s[16/16]o)(104:1000-s[16/16]o)(105:1000-s[16/16]o)(106:1000-s[16/16]o)(107:1000-s[16/16]o)(108:1000-s[16/16]o)]} >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpExplain::MultiJoinCteLinks [GOOD] >> KqpStats::DataQueryWithEffects [GOOD] >> KqpParams::BadParameterType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/dl5p/001048/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 9113, MsgBus: 29636 2024-11-21T23:18:22.397807Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875933249837045:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:22.397853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001048/r3tmp/tmpYo7DUV/pdisk_1.dat 2024-11-21T23:18:23.493245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:23.508344Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:23.536806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:23.536906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:23.547723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9113, node 1 2024-11-21T23:18:23.847285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:23.847311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:23.847320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2024-11-21T23:18:23.847451Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29636 TClient is connected to server localhost:29636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:24.690578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:24.725623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:24.904596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.073128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:18:25.162241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.413984Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875933249837045:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:27.414045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:27.423378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875954724675230:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.423494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.851942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.885110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.918985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.980819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.032060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.067919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.154551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875959019643026:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.154655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.155134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875959019643031:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.159274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:28.171381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:18:28.172404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875959019643033:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:31.255123Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2024-11-21T23:18:31.255578Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545560:2527], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [1:7439875971904545560:2527], task: 1 2024-11-21T23:18:31.255620Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2024-11-21T23:18:31.255638Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545560:2527], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set periodic stats 0.100000s 2024-11-21T23:18:31.255683Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545560:2527], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. EVLOGKQP START 2024-11-21T23:18:31.255832Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=2;input_channels_count=2; 2024-11-21T23:18:31.255919Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2024-11-21T23:18:31.257095Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:45 :META:Table { TableId { OwnerId: 72057594046644480 TableId: 6 } TablePath: "/Root/KeyValue" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Columns { Id: 1 Name: "Key" Type: 4 } Columns { Id: 2 Name: "Value" Type: 4097 } KeyColumnTypes: 4 Reads { ShardId: 72075186224037911 KeyRanges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } } ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC EnableShardsSequentialScan: true KeyColumnTypeInfos { } ReadType: ROWS 2024-11-21T23:18:31.257258Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545562:2528], TxId: 281474976710682, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Start compute actor [1:7439875971904545562:2528], task: 2 2024-11-21T23:18:31.257303Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545562:2528], TxId: 281474976710682, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. 
Set periodic stats 0.100000s 2024-11-21T23:18:31.263535Z node 1 :KQP_COMPUTE DEBUG: Register LocalFileSpillingActor [1:7439875971904545569:3834] at service [1:7597699455116079460:27756] 2024-11-21T23:18:31.263590Z node 1 :KQP_COMPUTE DEBUG: Register LocalFileSpillingActor [1:7439875971904545570:3835] at service [1:7597699455116079460:27756] 2024-11-21T23:18:31.265280Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545560:2527], TxId: 281474976710682, task: 1. Ctx: { TraceId : 01 ... /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.363721Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545563:2529], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2024-11-21T23:18:31.363734Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545563:2529], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:18:31.363779Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545563:2529], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:18:31.363792Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545563:2529], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T23:18:31.363821Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2024-11-21T23:18:31.363831Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2024-11-21T23:18:31.363848Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, waiting for chunk delivery in output channelId: 4, seqNo: [11] 2024-11-21T23:18:31.363859Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646923 2024-11-21T23:18:31.363875Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Finish input channelId: 4, from: [1:7439875971904545563:2529] 2024-11-21T23:18:31.363901Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. 
CA StateFunc 271646922 2024-11-21T23:18:31.363906Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545563:2529], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2024-11-21T23:18:31.363935Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.363941Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545563:2529], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:18:31.363958Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545563:2529], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T23:18:31.363982Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2024-11-21T23:18:31.363995Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2024-11-21T23:18:31.364006Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished 2024-11-21T23:18:31.364019Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545563:2529], TxId: 281474976710682, task: 3. Ctx: { TraceId : 01jd8ga80z41zyrv3fyjs4vwah. SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T23:18:31.364116Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. pass away 2024-11-21T23:18:31.364206Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710682;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:18:31.365202Z node 1 :KQP_COMPUTE DEBUG: [CloseFile] from: [1:7439875971904545573:3837], error: (empty maybe) 2024-11-21T23:18:31.365255Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.365314Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.366007Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.366357Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.366456Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.366787Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.367192Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.367245Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.367259Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. 
CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T23:18:31.367293Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2024-11-21T23:18:31.367317Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, waiting for chunk delivery in output channelId: 5, seqNo: [11] 2024-11-21T23:18:31.367603Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:18:31.367625Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T23:18:31.367645Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2024-11-21T23:18:31.367654Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished 2024-11-21T23:18:31.367667Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439875971904545564:2530], TxId: 281474976710682, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=M2E0N2RhYWYtNjdhYjk5ZTEtMTZkNzE1NjYtOWQwMzE0ZmE=. TraceId : 01jd8ga80z41zyrv3fyjs4vwah. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T23:18:31.367805Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. pass away 2024-11-21T23:18:31.367945Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710682;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:18:31.369228Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231111289, txId: 281474976710681] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::NonExistentSrc [GOOD] Test command err: 2024-11-21T23:18:25.590777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875945409889414:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:25.590842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001568/r3tmp/tmpSYP4ru/pdisk_1.dat 2024-11-21T23:18:26.153244Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:26.168971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:26.169052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:26.253963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9335 TServer::EnableGrpc on GrpcPort 11022, node 1 2024-11-21T23:18:26.687880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:26.687905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:26.687914Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:26.688030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:27.259039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:27.283398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.289109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231107418 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231107313 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231107418 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2024-11-21T23:18:27.451062Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:27.451212Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:27.451224Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:27.452092Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:29.361578Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231107418, tx_id: 281474976710659 } } } 2024-11-21T23:18:29.361944Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:29.364184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:18:29.365515Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2024-11-21T23:18:29.365527Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2024-11-21T23:18:29.404522Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2024-11-21T23:18:29.404569Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1732231109448 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: 
"Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2024-11-21T23:18:30.116546Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875969031625456:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001568/r3tmp/tmpPeTQ9Z/pdisk_1.dat 2024-11-21T23:18:30.235454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:30.252998Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:30.264608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:30.264695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:30.267910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31820 TServer::EnableGrpc on GrpcPort 64063, node 2 2024-11-21T23:18:30.557929Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:30.557952Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:30.557959Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:30.558253Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31820 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:31.008020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:31.024345Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231111065 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution:... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231111065 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution:... 
(TRUNCATED) 2024-11-21T23:18:31.030760Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:31.030898Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:31.030910Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:31.031628Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:33.386169Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2024-11-21T23:18:33.386227Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 24054, MsgBus: 29374 2024-11-21T23:17:58.639122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875831552170499:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:58.639176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002908/r3tmp/tmpcv0P5n/pdisk_1.dat 2024-11-21T23:17:59.011732Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24054, node 1 2024-11-21T23:17:59.023419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:59.023576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:59.029687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:59.099007Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:59.099028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:59.099034Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2024-11-21T23:17:59.099133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29374 TClient is connected to server localhost:29374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:59.835810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:59.893067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.115395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.367282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.471110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:02.751605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875848732041172:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:02.773803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:02.815780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.858367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.882621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.911477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:02.987480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.059082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:03.106774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875853027008971:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.106834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.106921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875853027008976:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:03.110651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:03.125279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875853027008978:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:03.636025Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875831552170499:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:03.636090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:04.312519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:08.727696Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjA2ZDhlODEtYTNmMDg3NDYtM2FjOTFhOS1iMzQzODk1Zg==, ActorId: [1:7439875870206879430:2575], ActorState: ExecuteState, TraceId: 01jd8g9hh962rxccd2g1t9n6a4, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (80001679 > 50331648), code: 2013 Trying to start YDB, gRPC: 26839, MsgBus: 25432 2024-11-21T23:18:10.066913Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875882029383668:2192];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002908/r3tmp/tmpVfs0in/pdisk_1.dat 2024-11-21T23:18:10.182084Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:10.368838Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:10.369699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:10.369792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:10.372264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26839, node 2 2024-11-21T23:18:10.542489Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:10.542512Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:10.542519Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:10.542630Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25432 TClient is connected to server localhost:25432 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:11.262235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.293435Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:11.305743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.411188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
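The KqpLimits::ReplySizeExceeded failure above reports "Query result size limit exceeded. (80001679 > 50331648), code: 2013": 50331648 bytes is exactly 48 MiB, so the roughly 76 MiB reply tripped the cap. A minimal sketch of that comparison, using only the numbers taken from the log (the constant names and the check itself are illustrative assumptions, not YDB source code):

    #include <cstdint>
    #include <iostream>

    int main() {
        // Values taken from the log line above; the limit equals 48 MiB.
        constexpr std::uint64_t kReplySizeLimitBytes = 48ull * 1024 * 1024;  // 50331648
        constexpr std::uint64_t kActualReplyBytes    = 80001679;             // ~76.3 MiB

        if (kActualReplyBytes > kReplySizeLimitBytes) {
            // Mirrors the shape of the error message only, not YDB's actual code path.
            std::cout << "Query result size limit exceeded. ("
                      << kActualReplyBytes << " > " << kReplySizeLimitBytes
                      << "), code: 2013\n";
        }
        return 0;
    }

In this test the condition is expected: the session returns the error as a QueryResponse and the case is still marked [GOOD], presumably because hitting the limit is the behaviour under test.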
2024-11-21T23:18:11.646742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... ce pool default not found or you don't have access permissions } 2024-11-21T23:18:14.115714Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.172536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.229062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.287252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.390276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.441917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.551365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.631389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875899209254896:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.631525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.631747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875899209254901:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.636001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:14.670282Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:18:14.670674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875899209254903:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:15.064710Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875882029383668:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:15.064933Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:15.980743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 23501, MsgBus: 3722 2024-11-21T23:18:18.642144Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875915698098791:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:18.642194Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002908/r3tmp/tmpqNvex1/pdisk_1.dat 2024-11-21T23:18:19.020765Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23501, node 3 2024-11-21T23:18:19.085228Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:19.089411Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:19.214958Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:19.298155Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:19.298181Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:19.298190Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:19.298319Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3722 TClient is connected to server localhost:3722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:18:20.074223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
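The recurring TPoolFetcherActor / TPoolCreatorActor sequence above (fetching the "default" resource pool returns NOT_FOUND, the pool is created via ESchemeOpCreateResourcePool, then a retry is scheduled with "Transaction ... completed, doublechecking") is a fetch-or-create loop. A generic sketch of that shape follows; all names, status values, and the delay are assumptions for illustration, not YDB's WorkloadService implementation:

    #include <chrono>
    #include <iostream>
    #include <string>
    #include <thread>

    // Generic fetch-or-create-with-scheduled-retry sketch.
    enum class EStatus { Ok, NotFound };

    static bool poolExists = false;  // stands in for the scheme object

    EStatus FetchPool(const std::string& name) {
        std::cout << "fetch pool '" << name << "'\n";
        return poolExists ? EStatus::Ok : EStatus::NotFound;
    }

    void CreatePool(const std::string& name) {
        std::cout << "create pool '" << name << "', doublecheck on next fetch\n";
        poolExists = true;
    }

    int main() {
        const std::string name = "default";
        for (int attempt = 0; attempt < 3; ++attempt) {
            if (FetchPool(name) == EStatus::Ok) {
                std::cout << "pool ready\n";
                return 0;
            }
            CreatePool(name);
            // "Scheduled retry for error" in the log: wait, then fetch again.
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
        return 1;
    }

The "doublechecking" retry in the log appears to correspond to the final fetch that confirms the pool exists once the create transaction has completed.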
2024-11-21T23:18:20.095939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.233562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.433737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.524857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:23.647918Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875915698098791:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:23.648007Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:23.819604Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875937172936866:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:23.819697Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:23.866993Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.921461Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.995576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.054046Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.135786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.222042Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.311958Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875941467904667:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:24.312070Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:24.312751Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875941467904672:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:24.318334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:24.337642Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875941467904674:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:26.072849Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:30.961683Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzQyMjJiZi1kMDBhNzgzMC1kM2U2YTE1ZS1kMTA5MmYx, ActorId: [3:7439875950057839609:2465], ActorState: ExecuteState, TraceId: 01jd8ga7hjaxex0v793bpy66mf, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::RowsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 4009, MsgBus: 24674 2024-11-21T23:18:09.740498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875878626705869:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:09.754989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028f2/r3tmp/tmpP8g3TE/pdisk_1.dat 2024-11-21T23:18:10.531844Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:10.550044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:10.550154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:10.556316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4009, node 1 2024-11-21T23:18:10.733175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:10.733200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:10.733212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:10.733320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24674 TClient is connected to server localhost:24674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:11.787015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:11.806437Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.856969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:18:12.081613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:12.312624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:12.434725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:14.783200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875900101543841:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.783372Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875878626705869:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:14.817919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:14.818013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.849803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.924380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.976205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.012168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.092578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.150738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.203137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875904396511641:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.203223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.203438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875904396511646:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.208217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:15.224338Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:18:15.224694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875904396511648:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 5937, MsgBus: 1864 2024-11-21T23:18:17.738480Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875914163371177:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:17.738570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028f2/r3tmp/tmpUZjkjc/pdisk_1.dat 2024-11-21T23:18:17.984537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:17.984621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:17.987085Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:18.014887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5937, node 2 2024-11-21T23:18:18.218239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:18.218261Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:18.218273Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:18.218368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1864 TClient is connected to server localhost:1864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:18.720449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:18.728038Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:18.739784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.823004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:18.999350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:19.094222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:22.130538Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875935638209347:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:22.130611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:22.232793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.315805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.481962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.572713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.706409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.764826Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875914163371177:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:22.764933Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:22.834118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.971993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875935638209860:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:22.972124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:22.972371Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875935638209865:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:22.991524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:23.003451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875935638209867:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 5364, MsgBus: 29283 2024-11-21T23:18:25.722926Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875948549914390:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:25.723002Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028f2/r3tmp/tmpEd1zSY/pdisk_1.dat 2024-11-21T23:18:26.113014Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:26.118640Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:26.118727Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:26.124916Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5364, node 3 2024-11-21T23:18:26.342941Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:26.342967Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:26.342974Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:26.343066Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29283 TClient is connected to server localhost:29283 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:27.420448Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:27.432197Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:27.454514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:18:27.542798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.814901Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:27.923686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:30.577325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875970024752574:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:30.577435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:30.704961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:30.729246Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875948549914390:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:30.729300Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:30.756833Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:30.806701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:30.858133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:30.941929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:31.004497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:31.119359Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875974319720378:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.119479Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.119733Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875974319720383:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.122996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:31.134310Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875974319720385:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |86.7%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::TPCH5-StreamLookupJoin+ColumnStore [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites >> BasicUsage::SelectDatabaseByHash [GOOD] >> TColumnShardTestSchema::RebootHotTiersRevCompression >> KqpQueryPerf::MultiRead-QueryService >> BasicUsage::WaitEventBlocksBeforeDiscovery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryWithEffects [GOOD] Test command err: Trying to start YDB, gRPC: 16318, MsgBus: 19677 2024-11-21T23:18:12.941838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875892835700958:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:12.941881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028ea/r3tmp/tmpYn053K/pdisk_1.dat 2024-11-21T23:18:13.654659Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:13.672963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:13.673080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:13.701528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16318, node 1 2024-11-21T23:18:13.871003Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:13.871033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:13.871043Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:13.871145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19677 TClient is connected to server localhost:19677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:14.659567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:14.676893Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:14.685217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:14.827155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:15.023363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:15.102774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.052033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875914310538981:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:17.052153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:17.424532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.477433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.530720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.579173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.607209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.688887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.825778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875914310539483:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:17.825868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:17.827738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875914310539489:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:17.838866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:17.873571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875914310539491:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:17.946520Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875892835700958:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:17.946683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:20.790832Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231100243, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 23244, MsgBus: 13324 2024-11-21T23:18:22.002420Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875933980028275:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:22.002494Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028ea/r3tmp/tmpYIjiVP/pdisk_1.dat 2024-11-21T23:18:22.276007Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23244, node 2 2024-11-21T23:18:22.288081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:22.288164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:22.541350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:22.627005Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:22.627031Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:22.627039Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:22.627145Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13324 TClient is connected to server localhost:13324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:23.681536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:23.695753Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:23.713555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:23.824929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:24.046742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:24.176566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:27.006545Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875933980028275:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:27.006636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:27.196321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875955454866468:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.196401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.258118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.291358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.335519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.389096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.434721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.539375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.618611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875955454866967:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.618734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.619077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875955454866972:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.637903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:27.664563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875955454866974:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 13170, MsgBus: 5554 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028ea/r3tmp/tmpL1aWPU/pdisk_1.dat 2024-11-21T23:18:31.826996Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:31.839362Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:31.873545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:31.873643Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:31.879812Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13170, node 3 2024-11-21T23:18:32.042359Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:32.042385Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:32.042417Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:32.042539Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5554 TClient is connected to server localhost:5554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:32.982974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:33.009862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:33.117742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:33.395393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:33.514255Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:35.965397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875988707102508:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:35.965489Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.018888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.057873Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.135575Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.212361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.261914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.342203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.402768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875993002070306:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.402858Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.403073Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875993002070311:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.408108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:36.423599Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875993002070313:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpJoinOrder::TPCH8-StreamLookupJoin-ColumnStore >> BasicUsage::SelectDatabase [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2024-11-21T23:15:28.613714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875188144070188:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:28.613821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:29.001289Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000ecc/r3tmp/tmp5H7KvX/pdisk_1.dat 2024-11-21T23:15:29.324014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:29.324093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:29.325704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:29.337311Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1891, node 1 2024-11-21T23:15:29.494964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/000ecc/r3tmp/yandexpOmxjB.tmp 2024-11-21T23:15:29.494989Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/000ecc/r3tmp/yandexpOmxjB.tmp 2024-11-21T23:15:29.495139Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/000ecc/r3tmp/yandexpOmxjB.tmp 2024-11-21T23:15:29.495221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:15:29.599989Z INFO: TTestServer started on Port 7470 GrpcPort 1891 TClient is connected to server localhost:7470 PQClient connected to localhost:1891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:30.007134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:30.033451Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:30.042680Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2024-11-21T23:15:30.047994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:30.230676Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:30.250504Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:15:32.753224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875205323940032:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.753368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.753641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875205323940044:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:15:32.757803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2024-11-21T23:15:32.792859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875205323940046:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2024-11-21T23:15:33.101980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.111353Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439875209618907415:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:15:33.113351Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzMyNzc3NzAtM2EzY2YzNWQtMzE4YzZhMzAtNmM2MzUxOGM=, ActorId: [1:7439875205323940014:2308], ActorState: ExecuteState, TraceId: 01jd8g4t877jhq4q4x4zyccgd2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:15:33.115875Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:15:33.138570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.264938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2024-11-21T23:15:33.616092Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875188144070188:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:33.616158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [1:7439875209618907689:2624] === CheckClustersList. 
Ok 2024-11-21T23:15:39.534677Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2024-11-21T23:15:39.579717Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2024-11-21T23:15:39.581055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7439875235388711728:2774], Recipient [1:7439875192439037774:2196]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:39.581091Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:39.581111Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2024-11-21T23:15:39.581158Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7439875235388711724:2771], Recipient [1:7439875192439037774:2196]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2024-11-21T23:15:39.581174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2024-11-21T23:15:39.695957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710672 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2024-11-21T23:15:39.696497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:15:39.696796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2024-11-21T23:15:39.696843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2024-11-21T23:15:39.696868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2024-11-21T23:15:39.696910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2024-11-21T23:15:39.696955Z node 1 :FLAT_TX_ ... : Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 1 EndOffset: 10 WriteTimestampMS: 1732231074522 CreateTimestampMS: 1732231074477 SizeLag: 1179 WriteTimestampEstimateMS: 1732231074691 } Cookie: 18446744073709551615 } 2024-11-21T23:17:54.704175Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 2024-11-21T23:17:54.704245Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 sending to client partition status 2024-11-21T23:17:54.705330Z :INFO: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: 5 2024-11-21T23:17:54.706977Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2024-11-21T23:17:54.706991Z node 6 :PERSQUEUE DEBUG: Read proxy: bootstrap for direct read id: 0 2024-11-21T23:17:54.707015Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2024-11-21T23:17:54.707135Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 1414 endOffset 10 max time lag 0ms effective offset 5 2024-11-21T23:17:54.707159Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 5 2024-11-21T23:17:54.707544Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 
2024-11-21T23:17:54.707574Z node 6 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T23:17:54.705939Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } } 2024-11-21T23:17:54.707742Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2024-11-21T23:17:54.706093Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2024-11-21T23:17:54.706152Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 clientCommitOffset (empty maybe) clientReadOffset 5 2024-11-21T23:17:54.706202Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2024-11-21T23:17:54.706289Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1732231074522, sizeLag# 1179 2024-11-21T23:17:54.706306Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1TEvPartitionReady. Aval parts: 1 2024-11-21T23:17:54.706376Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 performing read request: guid# 5c970d77-e02da56c-44632cb2-123d8016, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 1414, partitionsAsked# 1, maxTimeLag# 0ms 2024-11-21T23:17:54.706508Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 1414 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 1 committedOffset 1 Guid 5c970d77-e02da56c-44632cb2-123d8016 2024-11-21T23:17:54.708541Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1732231074550 CreateTimestampMS: 1732231074548 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1732231074558 CreateTimestampMS: 1732231074548 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1732231074558 CreateTimestampMS: 1732231074548 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 
94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1732231074558 CreateTimestampMS: 1732231074548 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1732231074558 CreateTimestampMS: 1732231074548 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551581 RealReadOffset: 9 WaitQuotaTimeMs: 0 } Cookie: 5 } 2024-11-21T23:17:54.708782Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2024-11-21T23:17:54.708819Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid 5c970d77-e02da56c-44632cb2-123d8016 has messages 1 2024-11-21T23:17:54.708973Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 read done: guid# 5c970d77-e02da56c-44632cb2-123d8016, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 558 2024-11-21T23:17:54.708995Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 response to read: guid# 5c970d77-e02da56c-44632cb2-123d8016 2024-11-21T23:17:54.709181Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 Process answer. Aval parts: 0 2024-11-21T23:17:54.709642Z :DEBUG: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] Got ReadResponse, serverBytesSize = 558, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428242 2024-11-21T23:17:54.709731Z :DEBUG: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428242 2024-11-21T23:17:54.710011Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (5-9) 2024-11-21T23:17:54.710097Z :DEBUG: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] Returning serverBytesSize = 558 to budget 2024-11-21T23:17:54.710132Z :DEBUG: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] In ContinueReadingDataImpl, ReadSizeBudget = 558, ReadSizeServerDelta = 52428242 2024-11-21T23:17:54.710526Z :DEBUG: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2024-11-21T23:17:54.710697Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2024-11-21T23:17:54.710751Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (6-6) 2024-11-21T23:17:54.710777Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (7-7) 2024-11-21T23:17:54.710799Z :DEBUG: [] Take Data. Partition 0. Read: {1, 2} (8-8) 2024-11-21T23:17:54.710742Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 grpc read done: success# 1, data# { read_request { bytes_size: 558 } } 2024-11-21T23:17:54.710823Z :DEBUG: [] Take Data. Partition 0. Read: {1, 3} (9-9) 2024-11-21T23:17:54.710880Z :DEBUG: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] The application data is transferred to the client. 
Number of messages 5, size 115 bytes 2024-11-21T23:17:54.710875Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 got read request: guid# d724456e-31e79bce-15b3a39c-203975f4 2024-11-21T23:17:54.710931Z :DEBUG: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:17:54.711077Z :INFO: [] [] [2be68200-e6b13bc2-c978f319-f4073827] Closing read session. Close timeout: 0.000000s 2024-11-21T23:17:54.711121Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:1 2024-11-21T23:17:54.711170Z :INFO: [] [] [2be68200-e6b13bc2-c978f319-f4073827] Counters: { Errors: 0 CurrentSessionLifetimeMs: 60 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:17:54.711299Z :NOTICE: [] [] [2be68200-e6b13bc2-c978f319-f4073827] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:17:54.711347Z :DEBUG: [] [] [2be68200-e6b13bc2-c978f319-f4073827] [] Abort session to cluster 2024-11-21T23:17:54.711893Z :NOTICE: [] [] [2be68200-e6b13bc2-c978f319-f4073827] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:17:54.712630Z node 5 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 grpc read done: success# 0, data# { } 2024-11-21T23:17:54.713426Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|29994b3f-fc06d700-b2acad88-9427952c_0] Write session: destroy 2024-11-21T23:17:54.712686Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 grpc read failed 2024-11-21T23:17:54.712715Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 grpc closed 2024-11-21T23:17:54.713579Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:17:54.712757Z node 5 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_5_2_11673575762438427771_v1 is DEAD 2024-11-21T23:17:54.713616Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_5_2_11673575762438427771_v1 2024-11-21T23:17:54.713661Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7439875814501563754:2494] destroyed 2024-11-21T23:17:54.713853Z node 6 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_5_2_11673575762438427771_v1 2024-11-21T23:17:54.714367Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [5:7439875814501563751:2491] disconnected; active server actors: 1 2024-11-21T23:17:54.714419Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [5:7439875814501563751:2491] client user disconnected session shared/user_5_2_11673575762438427771_v1 >> KqpLimits::AffectedShardsLimit [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> KqpParams::BadParameterType [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] >> BasicUsage::BasicWriteSession >> Viewer::ServerlessWithExclusiveNodes [GOOD] |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |86.7%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |86.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Viewer::ServerlessWithExclusiveNodesCheckTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231629.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231629.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231629.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231629.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231629.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231629.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230429.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112231629.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112231629.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230429.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112230429.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112230429.000000s;Name=;Codec=}; 2024-11-21T23:17:10.055057Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:17:10.480191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:17:10.515176Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:17:10.515258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:17:10.515532Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:17:10.566671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:17:10.566914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:17:10.567161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:17:10.567309Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:17:10.567437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:17:10.567586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:17:10.567711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:17:10.567826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:17:10.567936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:17:10.568068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:17:10.568181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:17:10.568307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:17:10.615623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:17:10.615731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:17:10.658937Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:17:10.670649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:17:10.670726Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:17:10.670909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:17:10.671139Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:17:10.671185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:17:10.671276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:17:10.671387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:17:10.671431Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:17:10.671649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:17:10.671766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:17:10.671798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:17:10.671881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:17:10.672135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:17:10.672171Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:17:10.672256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:17:10.672294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:17:10.672342Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:17:10.672394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2024-11-21T23:17:10.672441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:17:10.672472Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:17:10.672653Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2024-11-21T23:17:10.672770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2024-11-21T23:17:10.672852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2024-11-21T23:17:10.672936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2024-11-21T23:17:10.673104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:17:10.673180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:17:10.673227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... me=69; 2024-11-21T23:18:31.549049Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:18:31.549108Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=134; 2024-11-21T23:18:31.549207Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=55; 2024-11-21T23:18:31.549291Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2024-11-21T23:18:31.549517Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=187; 2024-11-21T23:18:31.550127Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=558; 2024-11-21T23:18:31.550242Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=52; 2024-11-21T23:18:31.550314Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=39; 2024-11-21T23:18:31.550357Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2024-11-21T23:18:31.550461Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2024-11-21T23:18:31.550505Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2024-11-21T23:18:31.550596Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2024-11-21T23:18:31.550651Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2024-11-21T23:18:31.550781Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2024-11-21T23:18:31.550832Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2024-11-21T23:18:31.550902Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2024-11-21T23:18:31.550937Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29160; 2024-11-21T23:18:31.551096Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:18:31.551214Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:18:31.551261Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard_impl.cpp:1558;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T23:18:31.551302Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:18:31.551423Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T23:18:31.551551Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T23:18:31.551660Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:18:31.551701Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:18:31.551781Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:18:31.551823Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:18:31.551880Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:18:31.551954Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T23:18:31.552011Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:18:31.552051Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:18:31.552102Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:18:31.552143Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:18:31.552187Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:18:31.552277Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:18:31.552734Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:18:31.553411Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1575:3444];tablet_id=9437184;parent=[1:1534:3410];fline=manager.h:99;event=ask_data;request=request_id=139;1={portions_count=21};; 2024-11-21T23:18:31.554008Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:18:31.554428Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:18:31.554466Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T23:18:31.554494Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:18:31.554539Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:18:31.554599Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:18:31.554656Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T23:18:31.554720Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:18:31.554762Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:18:31.554812Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:18:31.554849Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:18:31.554895Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:18:31.554977Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:18:31.559232Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T23:18:31.560650Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T23:18:31.566679Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:18:31.566769Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184
240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0
>> DstCreator::ExistingDst
>> KqpLimits::DatashardReplySize [GOOD]
>> KqpQueryPerf::MultiDeleteFromTable+QueryService [GOOD]
>> TColumnShardTestSchema::RebootOneTierExternalTtl
>> KqpQueryPerf::MultiRead-QueryService [GOOD]
>> BasicUsage::FallbackToSingleDb [GOOD]
>> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD]
>> BasicUsage::WriteSessionWriteInHandlers [GOOD]
>> BasicUsage::GetAllStartPartitionSessions [GOOD]
>> DstCreator::ExistingDst [GOOD]
>> KqpLimits::QueryExecTimeoutCancel [GOOD]
>> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD]
>> DstCreator::ColumnsSizeMismatch
>> DstCreator::EmptyReplicationConfig
>> SystemView::CollectPreparedQueries
>> DstCreator::ColumnsSizeMismatch [GOOD]
>> BasicUsage::FallbackToSingleDbAfterBadRequest
>> KqpLimits::QueryExecTimeout
>> BasicUsage::PreferredDatabaseNoFallback
>> DstCreator::ColumnTypeMismatch
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231629.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231629.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231629.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231629.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231629.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231629.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230429.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112231629.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112231629.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230429.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112230429.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112230429.000000s;Name=;Codec=};
2024-11-21T23:17:09.868231Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2024-11-21T23:17:10.479978Z node
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:17:10.514250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:17:10.514335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:17:10.514611Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:17:10.566572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:17:10.566812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:17:10.567057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:17:10.567164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:17:10.567263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:17:10.567361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:17:10.567487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:17:10.567618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:17:10.567716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:17:10.567828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:17:10.567925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:17:10.568020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:17:10.610428Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:17:10.610511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:17:10.656422Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:17:10.668778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:17:10.668879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:17:10.669107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:17:10.669291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:17:10.669378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:17:10.669423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:17:10.669518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:17:10.669592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:17:10.669639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:17:10.669687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:17:10.669903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:17:10.669971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:17:10.670019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:17:10.670052Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:17:10.670145Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:17:10.670200Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:17:10.670247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:17:10.670279Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:17:10.670364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:17:10.670428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:17:10.670459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:17:10.670512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:17:10.670556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:17:10.670587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:17:10.670794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2024-11-21T23:17:10.670887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2024-11-21T23:17:10.670983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2024-11-21T23:17:10.671077Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=49; 2024-11-21T23:17:10.671259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:17:10.671324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:17:10.671362Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... 
024-11-21T23:18:30.451228Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:941:2941] finished for tablet 9437184 2024-11-21T23:18:30.451318Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:941:2941] send ScanData to [1:940:2940] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:18:30.451840Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:941:2941] and sent to [1:940:2940] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.008},{"events":["f_processing","f_task_result"],"t":0.01},{"events":["f_ack","l_task_result"],"t":0.291},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.296}],"full":{"a":1732231110155113,"name":"_full_task","f":1732231110155113,"d_finished":0,"c":0,"l":1732231110451367,"d":296254},"events":[{"name":"bootstrap","f":1732231110155377,"d_finished":8405,"c":1,"l":1732231110163782,"d":8405},{"a":1732231110451006,"name":"ack","f":1732231110446718,"d_finished":3280,"c":3,"l":1732231110450909,"d":3641},{"a":1732231110450994,"name":"processing","f":1732231110165522,"d_finished":188691,"c":24,"l":1732231110450912,"d":189064},{"name":"ProduceResults","f":1732231110157855,"d_finished":10209,"c":29,"l":1732231110451213,"d":10209},{"a":1732231110451216,"name":"Finish","f":1732231110451216,"d_finished":0,"c":0,"l":1732231110451367,"d":151},{"name":"task_result","f":1732231110165545,"d_finished":185004,"c":21,"l":1732231110446542,"d":185004}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:941:2941]->[1:940:2940] 2024-11-21T23:18:30.451948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:18:30.154575Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T23:18:30.451988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:18:30.452176Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.097048s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.095319s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.043865s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.015412s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 
2024-11-21T23:18:30.452255Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:18:30.454376Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T23:18:30.454842Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T23:18:30.454998Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T23:18:30.455193Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T23:18:30.455273Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T23:18:30.455919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:948:2948];trace_detailed=; 2024-11-21T23:18:30.456358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T23:18:30.456588Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:18:30.456799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:30.456954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:30.457233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:18:30.457343Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:30.457471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:30.457516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:948:2948] finished for tablet 9437184 2024-11-21T23:18:30.457611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:948:2948] send ScanData to [1:947:2947] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:18:30.458094Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:948:2948] and sent to [1:947:2947] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1732231110455830,"name":"_full_task","f":1732231110455830,"d_finished":0,"c":0,"l":1732231110457670,"d":1840},"events":[{"name":"bootstrap","f":1732231110456033,"d_finished":957,"c":1,"l":1732231110456990,"d":957},{"a":1732231110457206,"name":"ack","f":1732231110457206,"d_finished":0,"c":0,"l":1732231110457670,"d":464},{"a":1732231110457185,"name":"processing","f":1732231110457185,"d_finished":0,"c":0,"l":1732231110457670,"d":485},{"name":"ProduceResults","f":1732231110456692,"d_finished":530,"c":2,"l":1732231110457501,"d":530},{"a":1732231110457504,"name":"Finish","f":1732231110457504,"d_finished":0,"c":0,"l":1732231110457670,"d":166}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:948:2948]->[1:947:2947] 2024-11-21T23:18:30.458204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:18:30.455339Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0;
2024-11-21T23:18:30.458251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor;
2024-11-21T23:18:30.458302Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=;
2024-11-21T23:18:30.465602Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184
240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::BadParameterType [GOOD]
Test command err:
Trying to start YDB, gRPC: 12054, MsgBus: 29337
2024-11-21T23:18:13.264131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:13.267477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:13.267672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028f3/r3tmp/tmpajBs45/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12054, node 1 2024-11-21T23:18:14.036757Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:14.045747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:14.045828Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:14.045871Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:14.046172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:14.136247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:14.136447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:14.151887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29337 TClient is connected to server localhost:29337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 500 C... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:14.738482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:14.817057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:15.293082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:15.722765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:16.117553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.033392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1732:3353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:17.033694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:17.059593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.320517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.621740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:17.959360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.324703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:18.791419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.241116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2302:3794], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.241249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.241685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2307:3799], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.353231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:19.629468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2309:3801], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:21.438674Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:2607:4016] TxId: 281474976715671. Ctx: { TraceId: 01jd8g9yzr0czfktkq9kt5ebb4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNjEwNDItYWVkMzBjOGYtNDMxMDk0MGYtYjdiODljYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2024-11-21T23:18:21.447639Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2615:4057], TxId: 281474976715671, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MTMwNjEwNDItYWVkMzBjOGYtNDMxMDk0MGYtYjdiODljYw==. TraceId : 01jd8g9yzr0czfktkq9kt5ebb4. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2607:4016], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T23:18:21.449017Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2613:4055], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MTMwNjEwNDItYWVkMzBjOGYtNDMxMDk0MGYtYjdiODljYw==. CustomerSuppliedId : . TraceId : 01jd8g9yzr0czfktkq9kt5ebb4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2607:4016], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T23:18:21.449338Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2614:4056], TxId: 281474976715671, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jd8g9yzr0czfktkq9kt5ebb4. SessionId : ydb://session/3?node_id=1&id=MTMwNjEwNDItYWVkMzBjOGYtNDMxMDk0MGYtYjdiODljYw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2607:4016], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T23:18:21.451718Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTMwNjEwNDItYWVkMzBjOGYtNDMxMDk0MGYtYjdiODljYw==, ActorId: [1:2571:4016], ActorState: ExecuteState, TraceId: 01jd8g9yzr0czfktkq9kt5ebb4, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 27645, MsgBus: 6909 2024-11-21T23:18:26.138489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:87:2133], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:26.138690Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:18:26.138869Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028f3/r3tmp/tmpPXP7tr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27645, node 2 2024-11-21T23:18:26.749830Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:26.752873Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:26.752942Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:26.752979Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:26.753234Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:26.828187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:26.828309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:26.842951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6909 TClient is connected to server localhost:6909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 U ... :18:30.278102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:30.675727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:31.112620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:31.484062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2298:3790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.484183Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.484736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2303:3795], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.507884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:31.733637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2305:3797], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:33.596727Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:2605:4012] TxId: 281474976715671. Ctx: { TraceId: 01jd8gaavqe1rhfwga03hj2zwr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjAwN2U3OTAtM2U1N2IwZjgtZjEyN2M3NDgtYTU4NmEzZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2024-11-21T23:18:33.597425Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2613:4055], TxId: 281474976715671, task: 3. Ctx: { TraceId : 01jd8gaavqe1rhfwga03hj2zwr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjAwN2U3OTAtM2U1N2IwZjgtZjEyN2M3NDgtYTU4NmEzZTI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:2605:4012], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T23:18:33.597830Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2612:4054], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jd8gaavqe1rhfwga03hj2zwr. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjAwN2U3OTAtM2U1N2IwZjgtZjEyN2M3NDgtYTU4NmEzZTI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:2605:4012], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T23:18:33.615303Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:2611:4053], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jd8gaavqe1rhfwga03hj2zwr. SessionId : ydb://session/3?node_id=2&id=MjAwN2U3OTAtM2U1N2IwZjgtZjEyN2M3NDgtYTU4NmEzZTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:2605:4012], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2024-11-21T23:18:36.308448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:36.308512Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:36.774809Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjAwN2U3OTAtM2U1N2IwZjgtZjEyN2M3NDgtYTU4NmEzZTI=, ActorId: [2:2567:4012], ActorState: ExecuteState, TraceId: 01jd8gaavqe1rhfwga03hj2zwr, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 62106, MsgBus: 23324 2024-11-21T23:18:37.610818Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875997512294972:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:37.610890Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028f3/r3tmp/tmppfcQeD/pdisk_1.dat 2024-11-21T23:18:37.727301Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:37.774872Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:37.774990Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:37.779545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62106, node 3 2024-11-21T23:18:37.839732Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:37.839767Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:37.839777Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:37.839896Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23324 TClient is connected to server localhost:23324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:38.358514Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:38.371925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:38.455439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:38.625332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:38.680689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:40.796235Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876010397198562:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:40.796713Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:40.821216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.854951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.886967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.917282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.985738Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:41.013108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:41.100075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876014692166360:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:41.100154Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:41.100374Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876014692166365:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:41.103967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:41.115355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876014692166367:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2024-11-21T23:18:42.392350Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmM3NGM1YTgtOTc1ZTFlMDgtYmRkZmY2MjItYTkwNmYyMWE=, ActorId: [3:7439876018987133977:2459], ActorState: ExecuteState, TraceId: 01jd8gakadf16s0x9kpwrwmyvn, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $group type mismatch, expected: { Kind: Data Data { Scheme: 2 } }, actual: Type (Data), schemeType: Int32, schemeTypeId: 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL_Types [GOOD]
Test command err:
2024-11-21T23:18:11.743929Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2024-11-21T23:18:11.848215Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot
2024-11-21T23:18:11.853097Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored
2024-11-21T23:18:11.853511Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor;
2024-11-21T23:18:11.886935Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event;
2024-11-21T23:18:11.887026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished;
2024-11-21T23:18:11.887282Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2024-11-21T23:18:11.898939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2024-11-21T23:18:11.899227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2024-11-21T23:18:11.899470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2024-11-21T23:18:11.899603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2024-11-21T23:18:11.899700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2024-11-21T23:18:11.900215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2024-11-21T23:18:11.900543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2024-11-21T23:18:11.900671Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:18:11.900773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:18:11.901095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:18:11.901589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:18:11.901728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:18:11.930138Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:18:11.930533Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T23:18:11.930619Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:18:11.930668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:18:11.941487Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:18:11.941788Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:18:11.941855Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:18:11.942033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:11.942254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:18:11.942345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:18:11.942384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:18:11.942526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:18:11.942598Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:18:11.942639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:18:11.942672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:18:11.942854Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:11.942909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:18:11.943864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:18:11.943940Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:18:11.944068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:18:11.944143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:18:11.944184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:18:11.944215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:18:11.944303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:18:11.944346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:18:11.944374Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:18:11.944420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:18:11.944462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:18:11.944488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:18:11.944690Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=67; 2024-11-21T23:18:11.944830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=86; 2024-11-21T23:18:11.944918Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2024-11-21T23:18:11.945039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=74; 2024-11-21T23:18:11.945223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:18:11.945288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:18:11.945331Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:18:11.945539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:18:11.945589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:18:11.945618Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:18:11.945783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:18:11.945846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2 ... 
t=start_construct_result;interval_idx=0;interval_id=16; 2024-11-21T23:18:43.424007Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T23:18:43.424152Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:18:43.424202Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T23:18:43.424249Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:18:43.424756Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T23:18:43.424805Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T23:18:43.424847Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=16; 2024-11-21T23:18:43.424896Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=16; 2024-11-21T23:18:43.424941Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T23:18:43.425047Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:18:43.425085Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T23:18:43.425124Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:18:43.425335Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:18:43.425485Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:18:43.425528Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:18:43.425641Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2024-11-21T23:18:43.425701Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2024-11-21T23:18:43.425803Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: uint64 2024-11-21T23:18:43.425921Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:18:43.426033Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:18:43.426136Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:18:43.426284Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:18:43.426491Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:18:43.426603Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:18:43.426644Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] finished for tablet 9437184 2024-11-21T23:18:43.426727Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:18:43.427199Z node 4 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [4:277:2289] and sent to [4:272:2284] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.138},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.139}],"full":{"a":1732231123286958,"name":"_full_task","f":1732231123286958,"d_finished":0,"c":0,"l":1732231123426774,"d":139816},"events":[{"name":"bootstrap","f":1732231123287695,"d_finished":2752,"c":1,"l":1732231123290447,"d":2752},{"a":1732231123426263,"name":"ack","f":1732231123425309,"d_finished":852,"c":1,"l":1732231123426161,"d":1363},{"a":1732231123426250,"name":"processing","f":1732231123290910,"d_finished":3304,"c":5,"l":1732231123426163,"d":3828},{"name":"ProduceResults","f":1732231123289119,"d_finished":2099,"c":8,"l":1732231123426628,"d":2099},{"a":1732231123426630,"name":"Finish","f":1732231123426630,"d_finished":0,"c":0,"l":1732231123426774,"d":144},{"name":"task_result","f":1732231123290927,"d_finished":2358,"c":4,"l":1732231123425180,"d":2358}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T23:18:43.427290Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:18:43.285831Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T23:18:43.427330Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:18:43.427440Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.133505s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.126707s;size=0.0063152;details={columns=9;};};]};; 2024-11-21T23:18:43.427543Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; >> KqpJoinOrder::TPCH3-StreamLookupJoin+ColumnStore [GOOD] >> DstCreator::EmptyReplicationConfig [GOOD] >> Viewer::TenantInfo5kkTablets [GOOD] >> DstCreator::ColumnTypeMismatch [GOOD] >> SystemView::CollectPreparedQueries [GOOD] >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::BasicWriteSession [GOOD] >> 
KqpJoinOrder::FiveWayJoinWithComplexPreds2-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH8-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH5+StreamLookupJoin-ColumnStore >> Viewer::UseTransactionWhenExecuteDataActionQuery >> SystemView::CollectScanQueries >> DstCreator::Basic >> DstCreator::WithSyncIndexAndIntermediateDir |86.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} >> DstCreator::WithAsyncIndex >> BasicUsage::ReadMirrored >> BasicUsage::WriteSessionCloseIgnoresWrites >> BasicUsage::CloseWriteSessionImmediately >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCH8+StreamLookupJoin-ColumnStore >> DstCreator::Basic [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> DstCreator::CannotFindColumn >> DstCreator::WithAsyncIndex [GOOD] >> SystemView::CollectScanQueries [GOOD] >> DstCreator::UnsupportedReplicationMode >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] |86.8%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6743, MsgBus: 62520 2024-11-21T23:18:36.390901Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875993596132522:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:36.391010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002868/r3tmp/tmpVNM9tF/pdisk_1.dat 2024-11-21T23:18:36.920114Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:36.928637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:36.928740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:36.935763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6743, node 1 2024-11-21T23:18:37.294204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:37.294235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:37.294249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:37.294429Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62520 TClient is connected to server localhost:62520 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:38.412125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:38.474513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:38.618149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:38.778526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:38.848086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:39.783996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876006481035900:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.784108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:40.707564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.736924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.770544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.810681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.843174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.881091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:41.065302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876015070971007:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:41.065449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:41.065699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876015070971012:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:41.086893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:41.097385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876015070971014:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:41.391235Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875993596132522:2249];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:41.391318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] Test command err: 2024-11-21T23:18:46.068486Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:18:46.164904Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:18:46.171231Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:18:46.171750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:18:46.197539Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:18:46.197629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:18:46.197850Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:18:46.208265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:18:46.208647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:18:46.208905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:18:46.209010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:18:46.209137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:18:46.209272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:18:46.209401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:18:46.209508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:18:46.209625Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:18:46.209730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:18:46.209861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:18:46.209990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:18:46.235801Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:18:46.236162Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T23:18:46.236249Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:18:46.236308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:18:46.246153Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:18:46.246450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:18:46.246505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:18:46.246696Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:46.246886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:18:46.246990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:18:46.247042Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:18:46.247158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:18:46.247225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:18:46.247264Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:18:46.247291Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:18:46.247457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:46.247538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:18:46.247574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:18:46.247618Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:18:46.247728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:18:46.247782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:18:46.247823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:18:46.247848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:18:46.247917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:18:46.247949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:18:46.247988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:18:46.248037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:18:46.248073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:18:46.248096Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:18:46.248293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2024-11-21T23:18:46.248371Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 
2024-11-21T23:18:46.248462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2024-11-21T23:18:46.248555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2024-11-21T23:18:46.248709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:18:46.248756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:18:46.248787Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:18:46.248993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:18:46.249036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:18:46.249332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:18:46.249497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:18:46.249545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2 ... 
ctor.cpp:192;stage=start;iterator=ready_results:(count:2;records_count:53332;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.646953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:18:51.647068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=40002; 2024-11-21T23:18:51.647150Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=320016;num_rows=40002;batch_columns=timestamp; 2024-11-21T23:18:51.647381Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:555:2546] send ScanData to [1:554:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 320016 rows: 40002 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T23:18:51.647556Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.647674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.647703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:18:51.647746Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:18:51.647850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:18:51.647925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:13330;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.647954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:18:51.648046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=13330; 2024-11-21T23:18:51.648084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=106640;num_rows=13330;batch_columns=timestamp; 2024-11-21T23:18:51.648245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:555:2546] send ScanData to [1:554:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 106640 rows: 13330 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T23:18:51.648319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.648399Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.648496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.648590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:18:51.648654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.648708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:51.648734Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:555:2546] finished for tablet 9437184 2024-11-21T23:18:51.648816Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:555:2546] send ScanData to [1:554:2545] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:18:51.649233Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:555:2546] and sent to [1:554:2545] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.231},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.234}],"full":{"a":1732231131414129,"name":"_full_task","f":1732231131414129,"d_finished":0,"c":0,"l":1732231131648853,"d":234724},"events":[{"name":"bootstrap","f":1732231131414362,"d_finished":5556,"c":1,"l":1732231131419918,"d":5556},{"a":1732231131648576,"name":"ack","f":1732231131645769,"d_finished":2590,"c":3,"l":1732231131648523,"d":2867},{"a":1732231131648567,"name":"processing","f":1732231131421153,"d_finished":154329,"c":24,"l":1732231131648524,"d":154615},{"name":"ProduceResults","f":1732231131416676,"d_finished":6377,"c":29,"l":1732231131648721,"d":6377},{"a":1732231131648723,"name":"Finish","f":1732231131648723,"d_finished":0,"c":0,"l":1732231131648853,"d":130},{"name":"task_result","f":1732231131421176,"d_finished":151326,"c":21,"l":1732231131645660,"d":151326}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2024-11-21T23:18:51.649306Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:18:51.412972Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4710140;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4710140;selected_rows=0; 2024-11-21T23:18:51.649349Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:18:51.649489Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.127047s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.194852s;size=0.002187128;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::LAST_PK;duration=0.013616s;size=0.00311667;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2024-11-21T23:18:51.649574Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:555:2546];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 65273, MsgBus: 3749 2024-11-21T23:18:42.015502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876021369720789:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:42.015566Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002860/r3tmp/tmpS6Bbkk/pdisk_1.dat 2024-11-21T23:18:42.307318Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65273, node 1 2024-11-21T23:18:42.380537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:42.380700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:42.384856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:42.407732Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:42.407793Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:42.407804Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:42.407923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3749 TClient is connected to server localhost:3749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:42.984079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:43.024293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:18:43.178849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:18:43.404036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:43.489952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:45.237452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876034254624388:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.237567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.493730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:45.523052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:45.559330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:45.591123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:45.628407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:45.662208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:45.719465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876034254624879:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.719604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876034254624884:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.719591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.723054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:45.733584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876034254624886:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:47.016701Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876021369720789:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:47.016824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 14113, MsgBus: 18207 2024-11-21T23:18:15.413137Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875904541215277:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:15.413198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028e1/r3tmp/tmpABPVUU/pdisk_1.dat 2024-11-21T23:18:16.005454Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:16.011712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:16.011828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:16.013516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14113, node 1 2024-11-21T23:18:16.246885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:16.246908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:16.246917Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:16.247013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18207 TClient is connected to server localhost:18207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:17.068144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:17.113663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.307091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.507350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.598498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.191364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875921721086157:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.201598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.248765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.345670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.399024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.416256Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875904541215277:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:20.416344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:20.451729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.500626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.566697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:20.654580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875926016053956:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.654685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.655122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875926016053961:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:20.659462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:20.691104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875926016053963:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 1471, MsgBus: 28756 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028e1/r3tmp/tmpN1pYJq/pdisk_1.dat 2024-11-21T23:18:23.724528Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875940627300238:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:23.724630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:23.887304Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:23.917963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:23.918063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:23.924182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1471, node 2 2024-11-21T23:18:24.096077Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:24.096100Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:24.096107Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:24.096204Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28756 TClient is connected to server localhost:28756 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitR ... 4322:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028e1/r3tmp/tmp4kauhy/pdisk_1.dat 2024-11-21T23:18:32.375984Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:32.478356Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:32.495707Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:32.495787Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:32.504930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8171, node 3 2024-11-21T23:18:32.577764Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:32.578092Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:32.578101Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:32.578217Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62602 TClient is connected to server localhost:62602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:33.196223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:33.207849Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:18:33.229818Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:33.360648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:33.587085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:33.692064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:36.195605Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875995078415056:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.197120Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.232291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.380689Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.455109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.494985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.530599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.576925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:36.679827Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875995078415562:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.679942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.680140Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875995078415567:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:36.684130Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:36.697965Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875995078415569:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:37.300203Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875977898544322:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:37.300305Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":5}],"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":4}],"E-Rows":"No estimate","ReadColumns":["Data","Key","Text"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"},{"Inputs":[{"InternalOperatorId":6}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"}],"Node Type":"Limit-InnerJoin (MapJoin)-Filter-TablePointLookup-ConstantExpr-Filter-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":11}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_1_0","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"Aggregate","CTE Name":"precompute_0_0"}],"Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node 
Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","ReadColumns":["Data","Key","Text"],"Name":"TablePointLookup","E-Size":"No estimate","E-Cost":"No estimate","Table":"EightShard"}],"Node Type":"TablePointLookup"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":16,"Plans":[{"PlanNodeId":22,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231628.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231628.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231628.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231628.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231628.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231628.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230428.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112231628.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112231628.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230428.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112230428.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112230428.000000s;Name=;Codec=}; 2024-11-21T23:17:09.793625Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:17:10.481155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:17:10.507824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:17:10.507901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:17:10.508131Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:17:10.566580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:17:10.566816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:17:10.567059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:17:10.567171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:17:10.567272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:17:10.567380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:17:10.567504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:17:10.567636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:17:10.567735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:17:10.567851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:17:10.567948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:17:10.568043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:17:10.613463Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:17:10.613547Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:17:10.656878Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:17:10.670814Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:17:10.670884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:17:10.671064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:17:10.671284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:17:10.671325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:17:10.671412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:17:10.671529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:17:10.671586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:17:10.671793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:17:10.671871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:17:10.671896Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:17:10.671953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:17:10.672012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:17:10.672029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:17:10.672074Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:17:10.672096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:17:10.672116Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:17:10.672147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:17:10.672169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:17:10.672187Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:17:10.672332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2024-11-21T23:17:10.672412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2024-11-21T23:17:10.672484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2024-11-21T23:17:10.672569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2024-11-21T23:17:10.672712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:17:10.672745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:17:10.672770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:17:10.672906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:17:10.672936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switch ... 
2024-11-21T23:18:30.671899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:798:2798] finished for tablet 9437184 2024-11-21T23:18:30.671974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:798:2798] send ScanData to [1:797:2797] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:18:30.672415Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:798:2798] and sent to [1:797:2797] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.504},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.508}],"full":{"a":1732231110163109,"name":"_full_task","f":1732231110163109,"d_finished":0,"c":0,"l":1732231110672017,"d":508908},"events":[{"name":"bootstrap","f":1732231110163347,"d_finished":5687,"c":1,"l":1732231110169034,"d":5687},{"a":1732231110671717,"name":"ack","f":1732231110668042,"d_finished":2697,"c":3,"l":1732231110671636,"d":2997},{"a":1732231110671708,"name":"processing","f":1732231110169128,"d_finished":418699,"c":24,"l":1732231110671638,"d":419008},{"name":"ProduceResults","f":1732231110165724,"d_finished":7391,"c":29,"l":1732231110671884,"d":7391},{"a":1732231110671886,"name":"Finish","f":1732231110671886,"d_finished":0,"c":0,"l":1732231110672017,"d":131},{"name":"task_result","f":1732231110169148,"d_finished":415596,"c":21,"l":1732231110667852,"d":415596}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:798:2798]->[1:797:2797] 2024-11-21T23:18:30.672499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:18:30.162649Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2024-11-21T23:18:30.672537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:18:30.672696Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.068282s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.063471s;size=0.002211992;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.010474s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.018333s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 
2024-11-21T23:18:30.672771Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:798:2798];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:18:30.686056Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T23:18:30.686292Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2024-11-21T23:18:30.686457Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T23:18:30.690520Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T23:18:30.690643Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T23:18:30.691331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:805:2805];trace_detailed=; 2024-11-21T23:18:30.691846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T23:18:30.692068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:18:30.692270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:30.692414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:30.692680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:18:30.692784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:30.692921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:18:30.692968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:805:2805] finished for tablet 9437184 2024-11-21T23:18:30.693074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:805:2805] send ScanData to [1:804:2804] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:18:30.693488Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:805:2805] and sent to [1:804:2804] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1732231110691237,"name":"_full_task","f":1732231110691237,"d_finished":0,"c":0,"l":1732231110693123,"d":1886},"events":[{"name":"bootstrap","f":1732231110691487,"d_finished":960,"c":1,"l":1732231110692447,"d":960},{"a":1732231110692656,"name":"ack","f":1732231110692656,"d_finished":0,"c":0,"l":1732231110693123,"d":467},{"a":1732231110692639,"name":"processing","f":1732231110692639,"d_finished":0,"c":0,"l":1732231110693123,"d":484},{"name":"ProduceResults","f":1732231110692169,"d_finished":521,"c":2,"l":1732231110692954,"d":521},{"a":1732231110692957,"name":"Finish","f":1732231110692957,"d_finished":0,"c":0,"l":1732231110693123,"d":166}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:805:2805]->[1:804:2804] 2024-11-21T23:18:30.693590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:18:30.690749Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T23:18:30.693636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:18:30.693696Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T23:18:30.693799Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:805:2805];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495312 160000/9495312 160000/9495312 80000/4749668 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2024-11-21T23:18:35.008509Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1732231115008479 2024-11-21T23:18:35.502219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875988617009179:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:35.517790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:35.570541Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875991602937038:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:35.570606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00131b/r3tmp/tmpgvLIWR/pdisk_1.dat 2024-11-21T23:18:35.807323Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:35.807467Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:36.072451Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:36.080514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:36.080609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:36.084627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:36.084718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2024-11-21T23:18:36.098330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:36.103163Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:36.107301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22156, node 1 2024-11-21T23:18:36.393119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/00131b/r3tmp/yandex2QtON4.tmp 2024-11-21T23:18:36.393148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/00131b/r3tmp/yandex2QtON4.tmp 2024-11-21T23:18:36.393324Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/00131b/r3tmp/yandex2QtON4.tmp 2024-11-21T23:18:36.393452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:36.462268Z INFO: TTestServer started on Port 23496 GrpcPort 22156 TClient is connected to server localhost:23496 PQClient connected to localhost:22156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:36.856791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:18:39.500087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876005796879208:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.500431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.500525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876005796879221:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.506913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:18:39.511993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876008782806536:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.512238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876008782806513:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.512274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.529050Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:18:39.531940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876005796879223:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:39.542033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876008782806542:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:39.751117Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876008782806579:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:39.751117Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876005796879336:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:39.751436Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTg4NmNlNmItYjA0MTQwMmItNTRhMGY1MTQtMzIyMDA5ZWI=, ActorId: [2:7439876008782806511:2280], ActorState: ExecuteState, TraceId: 01jd8gagmj4wpz57y3q8x5dn82, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:39.753122Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2RkMjdjNjItOGZkZDQxODAtZTUwZDRiOTItMWQyMDU4YWY=, ActorId: [1:7439876005796879191:2302], ActorState: ExecuteState, TraceId: 01jd8gagm36yh1s7w6vh881vsn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:39.754259Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:39.754257Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:39.757006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.878579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:40.010789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:22156", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:18:40.321271Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8gah893aj9s8yew9ewc5rg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE2YzNjMjQtOTExNjVhNjctOTNiOTMxNTEtNDc5ZjlkZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439876010091847005:2968] 2024-11-21T23:18:40.503293Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875988617009179:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:40.503420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:40.573379Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875991602937038:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:40.573443Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path e ... 1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 1 partNo : 0 messageNo: 1 size 115 offset: -1 2024-11-21T23:18:47.993860Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 1 partNo 0 2024-11-21T23:18:47.994755Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2024-11-21T23:18:47.995481Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 177 WTime 1732231127994 2024-11-21T23:18:47.995726Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:18:47.998626Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 2024-11-21T23:18:47.998701Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T23:18:47.998766Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2024-11-21T23:18:47.998783Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T23:18:47.998974Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2024-11-21T23:18:47.999034Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2024-11-21T23:18:47.999030Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T23:18:47.999281Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2024-11-21T23:18:47.999358Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0 2024-11-21T23:18:47.999428Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2024-11-21T23:18:47.999482Z node 2 :PERSQUEUE DEBUG: FormAnswer 0 2024-11-21T23:18:47.999578Z node 2 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1732231127993 queuesize 0 startOffset 0 2024-11-21T23:18:47.999966Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:18:48.000623Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 4000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:18:48.000666Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2024-11-21T23:18:48.000697Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2024-11-21T23:18:48.001165Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler === AcksHandler has written a message, closing the session 2024-11-21T23:18:48.001619Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: try to update token 2024-11-21T23:18:48.001698Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 2024-11-21T23:18:48.002242Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|4b2724c3-d14eff9-1e716412-323e71b3_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T23:18:48.002605Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T23:18:48.002936Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:18:48.002986Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:18:48.003073Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2024-11-21T23:18:48.003293Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T23:18:48.004226Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:18:48.004247Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:18:48.004303Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2024-11-21T23:18:48.004528Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2024-11-21T23:18:48.005531Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2024-11-21T23:18:48.006063Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1732231128005 2024-11-21T23:18:48.006272Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:18:48.010698Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 2024-11-21T23:18:48.010756Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2024-11-21T23:18:48.010807Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2024-11-21T23:18:48.010835Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2024-11-21T23:18:48.013499Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T23:18:48.014236Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:18:48.014468Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 5000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:18:48.014511Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2024-11-21T23:18:48.014534Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2024-11-21T23:18:48.015878Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2024-11-21T23:18:48.016390Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: try to update token 2024-11-21T23:18:48.016438Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2024-11-21T23:18:48.016653Z :INFO: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2024-11-21T23:18:48.016721Z :INFO: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session will now close 2024-11-21T23:18:48.016772Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: aborting 2024-11-21T23:18:48.017627Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2024-11-21T23:18:48.017642Z :WARNING: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2024-11-21T23:18:48.017706Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session is aborting and will not restart 2024-11-21T23:18:48.017766Z :DEBUG: [/Root] SessionId [src_id|4b2724c3-d14eff9-1e716412-323e71b3_0] MessageGroupId [src_id] Write session: destroy 2024-11-21T23:18:48.018531Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|4b2724c3-d14eff9-1e716412-323e71b3_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T23:18:48.018587Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|4b2724c3-d14eff9-1e716412-323e71b3_0 grpc closed 2024-11-21T23:18:48.018604Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|4b2724c3-d14eff9-1e716412-323e71b3_0 is DEAD 2024-11-21T23:18:48.019288Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:18:48.021797Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:18:48.021859Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7439876040156619039:2500] destroyed 2024-11-21T23:18:48.021911Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:18:48.525638Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439876044451586370:2505], TxId: 281474976710691, task: 1, CA Id [1:7439876044451586368:2505]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T23:18:48.560466Z node 1 :KQP_COMPUTE WARN: SelfId: [1:7439876044451586370:2505], TxId: 281474976710691, task: 1, CA Id [1:7439876044451586368:2505]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 26910, MsgBus: 3706 2024-11-21T23:18:18.701647Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875917165015750:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:18.701964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d7/r3tmp/tmpmMvLA0/pdisk_1.dat 2024-11-21T23:18:19.491452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:19.491612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:19.503880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:19.558116Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26910, node 1 2024-11-21T23:18:19.702725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:19.702744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:19.702749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:19.702836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3706 TClient is connected to server localhost:3706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:20.416324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.439535Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:20.461720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:20.643415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.907105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:21.016142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:23.108130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875938639853799:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:23.108234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:23.346696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.412784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.493321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.524744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.605244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.662494Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875917165015750:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:23.663733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:23.708720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:23.800827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875938639854308:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:23.800940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:23.801101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875938639854313:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:23.810701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:23.827182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875938639854315:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 25387, MsgBus: 22758 2024-11-21T23:18:26.366215Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875953330283609:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d7/r3tmp/tmpLiUVXx/pdisk_1.dat 2024-11-21T23:18:26.453138Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:26.575741Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25387, node 2 2024-11-21T23:18:26.667093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:26.670863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:26.719353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:26.754851Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:26.754871Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:26.754877Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:26.754956Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22758 TClient is connected to server localhost:22758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:27.290484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:27.304040Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:27.325946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:27.409577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:27.577446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:27.652054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:30.854544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875970510154308:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:30.854640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:30.887673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:30.937514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:30.976957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:31.090105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:31.136313Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:31.191955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:31.252894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875974805122103:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.252997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.253195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875974805122108:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:31.256049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:31.266282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875974805122110:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:31.341732Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875953330283609:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:31.341797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3379, MsgBus: 2950 2024-11-21T23:18:33.875278Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875979601117558:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:33.875307Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d7/r3tmp/tmpDGmi8X/pdisk_1.dat 2024-11-21T23:18:33.998272Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:34.028122Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:34.028220Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:34.029914Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3379, node 3 2024-11-21T23:18:34.090113Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:34.090134Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:34.090145Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:34.090246Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2950 TClient is connected to server localhost:2950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:34.556096Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:34.568087Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:18:34.585460Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:34.684934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:34.938064Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:35.058907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:37.692810Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875996780988431:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.692884Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.757243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:37.801013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:37.840576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:37.881157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:37.936778Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:38.014940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:38.066088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876001075956225:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.066187Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.066368Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876001075956230:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.070779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:38.089982Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876001075956232:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:38.875869Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875979601117558:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:38.875957Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 23389, MsgBus: 14285 2024-11-21T23:18:11.130142Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875888026645293:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:11.130188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028f1/r3tmp/tmpxOZ53t/pdisk_1.dat 2024-11-21T23:18:11.880544Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:11.896363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:11.896454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:11.900093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23389, node 1 2024-11-21T23:18:12.114995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:12.115021Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:12.115028Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:12.115120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14285 TClient is connected to server localhost:14285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:13.039566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:13.088031Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:13.109277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:13.291903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:13.518329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:13.629110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:16.023366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875905206515948:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:16.043835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:16.077413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:16.131248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875888026645293:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:16.131328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:16.171264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:16.232291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:16.313640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:16.369445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:16.454528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:16.538307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875909501483753:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:16.538414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:16.538467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875909501483758:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:16.541968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:16.557366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875909501483760:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":null,"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Aggregate"}],"Node Type":"Aggregate","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1732231098280,"Host":"ghrun-uc25mmfz34","OutputRows":1,"StartTimeMs":1732231098279,"IngressRows":3,"ComputeTimeUs":97,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":1,"DstStageId":1,"Bytes":3}],"WaitInputTimeUs":2297,"TaskId":1,"OutputBytes":3}],"PeakMemoryUsageBytes":65536,"DurationUs":1000,"CpuTimeUs":1609},{"Tasks":[{"FinishTimeMs":1732231098280,"Host":"ghrun-uc25mmfz34","OutputRows":1,"StartTimeMs":1732231098279,"IngressRows":3,"ComputeTimeUs":52,"NodeId":1,"OutputChannels":[{"ChannelId":2,"Rows":1,"DstStageId":1,"Bytes":3}],"WaitInputTimeUs":2256,"TaskId":2,"OutputBytes":3}],"PeakMemoryUsageBytes":65536,"DurationUs":1000,"CpuTimeUs":412}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":2,"Max":1,"Min":1},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":6,"Max":3,"Min":3}},"Name":"4","Push":{"WaitTimeUs":{"Count":2,"Sum":4589,"Max":2321,"Min":2268},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":2,"Max":1,"Min":1},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2}}}],"DurationUs":{"Count":2,"Sum":2000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576},"Tasks":2,"OutputRows":{"Count":2,"Sum":2,"Max":1,"Min":1},"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":1000,"BaseTimeMs":1732231098277,"WaitInputTimeUs":{"Count":2,"Sum":4553,"Max":2297,"Min":2256},"OutputBytes":{"Count":2,"Sum":6,"Max":3,"Min":3},"CpuTimeUs":{"Count":2,"Sum":819,"Max":728,"Min":91},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":24,"Min":24}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":24,"Min":24},"WaitTimeUs":{"Count":2,"Sum":4587,"Max":2323,"Min":2264},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1"}],"Node 
Type":"Aggregate-Limit","Stats":{"ComputeNodes":[{"Tasks":[{"InputBytes":6,"FinishTimeMs":1732231098281,"Host":"ghrun-uc25mmfz34","ResultRows":1,"ResultBytes":3,"OutputRows":1,"StartTimeMs":1732231098280,"InputRows":2,"ComputeTimeUs":202,"InputChannels":[{"WaitTimeUs":1658,"ChannelId":1,"Rows":1,"SrcStageId":0,"Bytes":3},{"WaitTimeUs":1688,"ChannelId":2,"Rows":1,"SrcStageId":0,"Bytes":3}],"NodeId":1,"OutputChannels":[{"ChannelId":3,"Rows":1,"DstStageId":0,"Bytes":3}],"WaitInputTimeUs":1550,"TaskId":3,"OutputBytes":3}],"PeakMemoryUsageBytes":131072,"DurationUs":1000,"CpuTimeUs":1163}],"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"PhysicalStageId":1,"InputBytes":{"Count":1,"Sum":6,"Max":6,"Min":6},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"BaseTimeMs":1732231098277,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"M ... 72057594046644480 2024-11-21T23:18:33.164514Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875980398277976:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":8,"Columns":["Key","Value1","Value2"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","Stats":{"UseLlvm":"undefined","DurationUs":{"Count":1,"Sum":8000,"Max":8000,"Min":8000},"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"RESULT","Push":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":8000,"BaseTimeMs":1732231115059,"CpuTimeUs":{"Count":1,"Sum":961,"Max":961,"Min":961},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node 
Type":"Limit-Filter","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"11","Push":{"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"PauseMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"WaitTimeUs":{"Count":1,"Sum":7277,"Max":7277,"Min":7277},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"DurationUs":{"Count":1,"Sum":2000,"Max":2000,"Min":2000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"StageDurationUs":2000,"BaseTimeMs":1732231115059,"WaitInputTimeUs":{"Count":1,"Sum":7196,"Max":7196,"Min":7196},"OutputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"CpuTimeUs":{"Count":1,"Sum":628,"Max":628,"Min":628},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"7","Push":{"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":6444,"Max":6444,"Min":6444},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":8,"Min":2}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":8476,"Max":8476,"Min":8476},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":10,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":2,"StageDurationUs":0,"BaseTimeMs":1732231115059,"WaitInputTimeUs":{"Count":1,"Sum":8393,"Max":8393,"Min":8393},"OutputBytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"CpuTimeUs":{"Count":1,"Sum":573,"Max":573,"Min":573},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18}},"Name":"9","Push":{"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":18,"Max":18,"Min":18},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":8474,"Max":8474,"Min":8474},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":10,"Min":1}}}]}}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"4","Push":{"WaitTimeUs":{"Count":1,"Sum":590,"Max":590,"Min":590},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1732231115048,"WaitInputTimeUs":{"Count":1,"Sum":570,"Max":570,"Min":570},"CpuTimeUs":{"Count":1,"Sum":520,"Max":520,"Min":520},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32},"WaitTimeUs":{"Count":1,"Sum":587,"Max":587,"Min":587},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node 
Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":1006,"Max":1006,"Min":1006},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"InputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":1,"StageDurationUs":1000,"BaseTimeMs":1732231115048,"WaitInputTimeUs":{"Count":1,"Sum":956,"Max":956,"Min":956},"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":517,"Max":517,"Min":517},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"FirstMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":1033,"Max":1033,"Min":1033},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":425413,"CpuTimeUs":414804},"ProcessCpuTimeUs":568,"TotalDurationUs":458657,"ResourcePoolId":"default","QueuedTimeUs":1214},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["Key","Value1","Value2"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-Cpu":0.628,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-Cpu":1.201,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 4025, MsgBus: 2400 2024-11-21T23:17:34.622616Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875726680719848:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:34.622669Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f75/r3tmp/tmpu1h3ow/pdisk_1.dat 2024-11-21T23:17:35.180168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:35.180314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:35.190955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:35.229095Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4025, node 1 2024-11-21T23:17:35.379155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:35.379186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:35.379194Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:35.379336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2400 TClient is connected to server localhost:2400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:36.027383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.064249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.229387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.396193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
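The query-plan statistics object that closes just above (the JSON ending in "PlanNodeType":"Query") nests stages under repeated "Plans" arrays, and each stage's counters ("CpuTimeUs", "OutputRows", and so on) are stored as {Count, Sum, Max, Min} aggregates. A minimal Python sketch for totalling CpuTimeUs per "Node Type" is given below; it assumes the JSON has been saved to a file called plan.json and that the tree sits under a top-level "Plan" key, neither of which is part of this log.

    import json

    def walk(node, totals):
        # Accumulate the CpuTimeUs sum of this plan node, then recurse into children.
        cpu = node.get("Stats", {}).get("CpuTimeUs", {})
        if "Sum" in cpu:
            key = node.get("Node Type", "unknown")
            totals[key] = totals.get(key, 0) + cpu["Sum"]
        for child in node.get("Plans", []):
            walk(child, totals)

    with open("plan.json") as f:              # hypothetical file holding the JSON above
        plan = json.load(f)

    totals = {}
    walk(plan.get("Plan", plan), totals)      # fall back to the root if there is no "Plan" wrapper
    for node_type, cpu_us in sorted(totals.items()):
        print(f"{node_type}: {cpu_us} us")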
2024-11-21T23:17:36.467804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:38.283504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875743860590529:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:38.283652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:38.738703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.825209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.863666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.918792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.957806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.996683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.094185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875748155558327:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.094267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.094467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875748155558332:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.097687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:39.108505Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:17:39.108947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875748155558334:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:39.623390Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875726680719848:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:39.623459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:40.268696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.296197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.333437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.373996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.409647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.453675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28914, MsgBus: 23137 2024-11-21T23:17:42.874856Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875762156390176:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:42.874913Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f75/r3tmp/tmpM6abbt/pdisk_1.dat 2024-11-21T23:17:43.026802Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:43.047785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:43.047864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:43.053308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28914, node 2 2024-11-21T23:17:43.138297Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:43.138319Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:43.138327Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:43.138438Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23137 TClient is connected to server localhost:23137 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:43.666756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsa ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:13.835988Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:13.915235Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.007373Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.057607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.140687Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.198808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.299536Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.393618Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875899756151512:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.393779Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.394710Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439875899756151517:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.410469Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:14.442564Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:18:14.443669Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439875899756151520:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:16.306666Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:16.389305Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 30036, MsgBus: 20828 2024-11-21T23:18:20.775434Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439875924148332112:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:20.775553Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f75/r3tmp/tmpDea3bh/pdisk_1.dat 2024-11-21T23:18:21.216236Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:21.216999Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:21.217101Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:21.220076Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30036, node 6 2024-11-21T23:18:21.451117Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:21.451148Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:21.451160Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:21.451295Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20828 TClient is connected to server localhost:20828 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:18:22.523452Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:18:22.537869Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
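The warning pair repeated throughout these runs ("Failed to fetch pool info" / "Failed to fetch pool default" with "Resource pool default not found or you don't have access permissions") is emitted on each node until the default resource pool has been created, after which the "Transaction ... completed, doublechecking" retry succeeds. When triaging a long log it can help to count how often this happens and on which nodes; the snippet below is only an illustrative stand-alone helper that reads a log on stdin, not a YDB tool.

    import re
    import sys
    from collections import Counter

    text = sys.stdin.read()

    # Raw occurrence counts for the recurring phrases.
    for phrase in (
        "Failed to fetch pool info",
        "Failed to fetch pool default",
        "Resource pool default not found or you don't have access permissions",
    ):
        print(f"{text.count(phrase):5d}  {phrase}")

    # KQP_WORKLOAD_SERVICE warnings broken down by node number.
    nodes = Counter(m.group(1) for m in re.finditer(r"node (\d+) :KQP_WORKLOAD_SERVICE WARN", text))
    for node, n in sorted(nodes.items()):
        print(f"node {node}: {n} workload-service warning(s)")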
2024-11-21T23:18:22.632082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:23.189578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:23.345736Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.778534Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439875924148332112:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:25.778621Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:26.721279Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875949918137599:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:26.721410Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:26.777418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:26.841766Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:26.920477Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.041777Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.108549Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.193900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.278035Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875954213105399:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.278151Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.278383Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439875954213105404:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.284064Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:27.322199Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:18:27.322506Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439875954213105406:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:29.064049Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:18:29.126325Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DatashardReplySize [GOOD] Test command err: Trying to start YDB, gRPC: 12562, MsgBus: 16441 2024-11-21T23:18:09.784565Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875878529376715:2183];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:09.784616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028fa/r3tmp/tmpr8BHon/pdisk_1.dat 2024-11-21T23:18:10.445201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:10.445287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:10.449992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:10.467966Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12562, node 1 2024-11-21T23:18:10.709058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:10.709078Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:10.709084Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:10.709160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16441 TClient is connected to server localhost:16441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:11.621507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:11.659874Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:11.702545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:11.925085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:12.122504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:12.223666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:14.633982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875900004214796:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.648678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:14.678124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.743556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.787872Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875878529376715:2183];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:14.787938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:14.825314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.899721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:14.948623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.053931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:15.176996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875904299182602:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.177105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.177444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875904299182607:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:15.181771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:15.198590Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:18:15.198970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875904299182609:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:16.341521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.245783Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439875925774020366:2580] TxId: 281474976710672. Ctx: { TraceId: 01jd8g9xkr0hv9y1k3zkb598qr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgyYzMzOWEtNTk5MzI0NGQtN2ZkNGRiYTItMTQ1M2ZiODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. } 2024-11-21T23:18:20.261969Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjgyYzMzOWEtNTk5MzI0NGQtN2ZkNGRiYTItMTQ1M2ZiODM=, ActorId: [1:7439875925774020349:2580], ActorState: ExecuteState, TraceId: 01jd8g9xkr0hv9y1k3zkb598qr, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. Trying to start YDB, gRPC: 18233, MsgBus: 21783 2024-11-21T23:18:21.403852Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875927932318495:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028fa/r3tmp/tmplTdhQg/pdisk_1.dat 2024-11-21T23:18:21.563300Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:21.679001Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:21.689822Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:21.689922Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:21.693403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18233, node 2 2024-11-21T23:18:21.914941Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:21.914965Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:21.914975Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:21.915073Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21783 TClient is connected to server localhost:21783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:23.021649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 w ... 
sor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:26.395931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:26.439963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:26.524756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:26.575317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:26.668754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:26.752843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875949407157050:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:26.752949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:26.753264Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875949407157055:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:26.758777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:26.772769Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875949407157057:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:28.081667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:28.427023Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTFlZThkYjQtNTczYjNiOGEtNGU4OTI5OC05ODcwMWI3MA==, ActorId: [2:7439875957997092292:2487], ActorState: ExecuteState, TraceId: 01jd8ga5mwcp6g0kxq7h0v8e62, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:915: Memory limit exception at ExecuteState, current limit is 1024 bytes.
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:915: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 8345, MsgBus: 6818 2024-11-21T23:18:29.391791Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875963038190111:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:29.391865Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028fa/r3tmp/tmpzLzyoX/pdisk_1.dat 2024-11-21T23:18:29.560005Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:29.588108Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:29.588218Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:29.591485Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8345, node 3 2024-11-21T23:18:29.655602Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:29.655627Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:29.655636Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:29.655747Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6818 TClient is connected to server localhost:6818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:30.170529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:30.186460Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:18:30.213430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:30.307791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:30.575436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:30.678841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:33.433403Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875980218060985:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:33.433530Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:33.498577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:33.566958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:33.652484Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:33.733314Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:33.777843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:33.867979Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:33.928238Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875980218061494:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:33.928353Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:33.928676Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439875980218061499:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:33.933517Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:33.950449Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439875980218061501:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:34.411940Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875963038190111:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:34.412005Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:35.224409Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:44.543876Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjM5MzJkNWEtZGNmOWZlNjYtODI3M2EyZDktZTAzYTIzM2U=, ActorId: [3:7439876018872769124:2724], ActorState: ExecuteState, TraceId: 01jd8gakd6f4gaz0r0ac6sg76f, Create QueryResponse for error on request, msg: 2024-11-21T23:18:44.544811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:44.544857Z node 3 :IMPORT WARN: Table profiles were not loaded
: Error: Query result size limit exceeded. (200003965 > 50331648), code: 2013 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2024-11-21T23:18:26.741996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875951328122698:2185];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:26.742118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001559/r3tmp/tmprnwZET/pdisk_1.dat 2024-11-21T23:18:27.415164Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:27.415908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:27.415991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:27.422086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24371 TServer::EnableGrpc on GrpcPort 6976, node 1 2024-11-21T23:18:27.865263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:27.865301Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:27.865315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:27.865422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:28.247855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:28.265491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:28.413881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
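For scale, the "Query result size limit exceeded" message above compares an attempted result of about 190.7 MiB against a cap of exactly 48 MiB (50331648 = 48 × 1024 × 1024); the snippet below merely restates that arithmetic with the two byte counts taken from the message.

    MIB = 1024 * 1024
    LIMIT_BYTES = 50331648      # cap reported in the message; equals exactly 48 MiB
    RESULT_BYTES = 200003965    # size the query tried to return

    assert LIMIT_BYTES == 48 * MIB
    print(f"result {RESULT_BYTES / MIB:.1f} MiB exceeds the {LIMIT_BYTES // MIB} MiB limit")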
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231108307 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231108489 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231108307 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231108489 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2024-11-21T23:18:28.466552Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:28.466712Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:28.466747Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:28.467477Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:30.690632Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, 
owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231108377, tx_id: 281474976710658 } } } 2024-11-21T23:18:30.690960Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:30.692468Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:18:30.694317Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231108489 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" 
SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72 ... VE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:31.668246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21879 TServer::EnableGrpc on GrpcPort 25194, node 2 2024-11-21T23:18:31.909063Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:31.909093Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:31.909101Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:31.909280Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:32.423855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:32.431148Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:32.437381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:32.553320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231112472 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231112633 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231112472 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231112633 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T23:18:32.602024Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:32.602189Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:32.602206Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:32.602778Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:35.573488Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231112556, tx_id: 281474976710658 } } } 2024-11-21T23:18:35.573835Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:35.575287Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:18:35.576196Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231112633 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T23:18:35.576359Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=132231628.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231628.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231628.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231628.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231628.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231628.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231628.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230428.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112231628.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112231628.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230428.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112230428.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112230428.000000s;Name=;Codec=}; 2024-11-21T23:17:09.791475Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:17:10.478572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:17:10.513662Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:17:10.513768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:17:10.514076Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:17:10.571715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:17:10.571964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:17:10.572219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:17:10.572335Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:17:10.572441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:17:10.572549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:17:10.572669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:17:10.572805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:17:10.572915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:17:10.573023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:17:10.573122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:17:10.573221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:17:10.615911Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:17:10.615981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:17:10.662760Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:17:10.670585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:17:10.670663Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:17:10.670856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:17:10.671119Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:17:10.671163Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:17:10.671257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:17:10.671393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:17:10.671428Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:17:10.671617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:17:10.671718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:17:10.671750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:17:10.671838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:17:10.671892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:17:10.671932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:17:10.671962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:17:10.672038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:17:10.672074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:17:10.672102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:17:10.672149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2024-11-21T23:17:10.672187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:17:10.672216Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:17:10.672593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2024-11-21T23:17:10.673417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=764; 2024-11-21T23:17:10.673530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2024-11-21T23:17:10.673615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2024-11-21T23:17:10.673797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:17:10.673877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:17:10.673922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateS ... ime=80; 2024-11-21T23:18:31.379489Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:18:31.379552Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=131; 2024-11-21T23:18:31.379656Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=59; 2024-11-21T23:18:31.379753Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=48; 2024-11-21T23:18:31.380041Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=241; 2024-11-21T23:18:31.380728Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=617; 2024-11-21T23:18:31.380863Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=60; 2024-11-21T23:18:31.380958Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=41; 2024-11-21T23:18:31.381004Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2024-11-21T23:18:31.381052Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2024-11-21T23:18:31.381093Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2024-11-21T23:18:31.381177Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2024-11-21T23:18:31.381220Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2024-11-21T23:18:31.381298Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2024-11-21T23:18:31.381335Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2024-11-21T23:18:31.381399Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2024-11-21T23:18:31.381444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15789; 2024-11-21T23:18:31.381596Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:18:31.381726Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:18:31.381770Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard_impl.cpp:1558;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T23:18:31.381814Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:18:31.381921Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T23:18:31.382036Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T23:18:31.382141Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:18:31.382186Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:18:31.382262Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:18:31.382301Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:18:31.382369Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:18:31.382494Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T23:18:31.382558Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:18:31.382599Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:18:31.382648Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:18:31.382685Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:18:31.382728Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:18:31.382833Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:18:31.383588Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:18:31.383685Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1531:3400];tablet_id=9437184;parent=[1:1492:3368];fline=manager.h:99;event=ask_data;request=request_id=122;1={portions_count=21};; 2024-11-21T23:18:31.384760Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:18:31.387878Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:18:31.387916Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T23:18:31.387935Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:18:31.387972Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:18:31.388038Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:18:31.388115Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T23:18:31.388170Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:18:31.388210Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:18:31.388253Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:18:31.388284Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:18:31.388319Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:18:31.388382Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:18:31.388787Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T23:18:31.389960Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T23:18:31.391654Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:18:31.391703Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1492:3368];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 15674, MsgBus: 14076 2024-11-21T23:18:14.648083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875897924712596:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:14.648133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028e9/r3tmp/tmp3zgdUi/pdisk_1.dat 2024-11-21T23:18:15.395993Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15674, node 1 2024-11-21T23:18:15.485527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:15.485612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:15.487119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:15.651155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:15.651362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:15.651375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:15.651522Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14076 TClient is connected to server localhost:14076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:16.484542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:16.504483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:16.740866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:16.997865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:17.094520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:19.141733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875919399550718:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.141861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.590349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.632409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.653966Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875897924712596:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:19.654024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:19.681664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.755756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.806726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.859848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:19.939624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875919399551217:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.939708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.940063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875919399551222:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:19.944192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:19.968895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875919399551224:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:21.585858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:21.924801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:18:21.965844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [1, 4)","Key [42, 42]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key [1, 4)","Key [42, 42]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [1, 4)","Key [42, 42]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":4}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 10714, MsgBus: 18327 2024-11-21T23:18:24.302800Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875942550808621:2125];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:24.302917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028e9/r3tmp/tmp49U52f/pdisk_1.dat 2024-11-21T23:18:24.587118Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:24.627386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:24.627480Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:24.630194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10714, node 2 2024-11-21T23:18:24.850998Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:24.851018Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:24.851026Z node 2 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2024-11-21T23:18:24.851133Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18327 TClient is connected to server localhost:18327 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ... 2024-11-21T23:18:32.849809Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:18:32.849925Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:18:32.849948Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["OlapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"Value \u003E 0","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No 
estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Value \u003E 0","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 3255, MsgBus: 19400 2024-11-21T23:18:35.148161Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875988131111357:2090];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028e9/r3tmp/tmpWURjCb/pdisk_1.dat 2024-11-21T23:18:35.242384Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:35.365472Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:35.367306Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:35.367725Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:35.383564Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3255, node 3 2024-11-21T23:18:35.488877Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:35.488899Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:35.488911Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:35.489027Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19400 TClient is connected to server localhost:19400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:18:36.140411Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:36.172986Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:36.276368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:36.500904Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:36.576107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:39.187738Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876005310982204:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.187851Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.234099Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.268988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.299355Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.331873Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.373274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.431213Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.482729Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876005310982702:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.482835Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.487649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876005310982707:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.491731Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:39.504188Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876005310982709:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:40.147818Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439875988131111357:2090];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:40.147891Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:40.592685Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:40.829459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:41.995307Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7439876013900919714:2585] TxId: 281474976710674. Ctx: { TraceId: 01jd8gajmq6eb5pfq22mq006xb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWUyY2YzY2EtZTY1NDUwMzAtNmNmMDY2ZWQtOTJkNDY2OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Too many affected shards: datashardTasks=21, limit: 20 2024-11-21T23:18:42.007636Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWUyY2YzY2EtZTY1NDUwMzAtNmNmMDY2ZWQtOTJkNDY2OTE=, ActorId: [3:7439876013900919258:2585], ActorState: ExecuteState, TraceId: 01jd8gajmq6eb5pfq22mq006xb, Create QueryResponse for error on request, msg:
: Error: Affected too many shards: 0, code: 2029 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2024-11-21T23:18:45.451888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876035275680720:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:45.451936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001550/r3tmp/tmpqJcu7u/pdisk_1.dat 2024-11-21T23:18:45.801786Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:45.853087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:45.853199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:45.855484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8486 TServer::EnableGrpc on GrpcPort 25211, node 1 2024-11-21T23:18:46.173896Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:46.173947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:46.173961Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:46.174177Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8486 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:46.626578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:46.653943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:18:46.798809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231126689 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231126871 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231126689 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231126871 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T23:18:46.842572Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:46.842782Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:46.842798Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:46.843406Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:48.877502Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231126780, tx_id: 281474976710658 } } } 2024-11-21T23:18:48.877908Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:48.879544Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:18:48.881508Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231126871 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: ... 
t_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001550/r3tmp/tmpkD0tcM/pdisk_1.dat 2024-11-21T23:18:49.593853Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:49.622741Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:49.622836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:49.624387Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62808 TServer::EnableGrpc on GrpcPort 4679, node 2 2024-11-21T23:18:49.831627Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:49.831655Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:49.831662Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:49.831755Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:18:50.116756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:50.123839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:18:50.156153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231130168 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231130231 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231130168 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231130231 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2024-11-21T23:18:50.195214Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:50.195331Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:50.195345Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:50.195815Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:52.377859Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, 
owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231130196, tx_id: 281474976715658 } } } 2024-11-21T23:18:52.378106Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:52.379451Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:18:52.380588Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231130231 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: 
"compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T23:18:52.380740Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2024-11-21T23:18:47.746488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876040190836536:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:47.746564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001549/r3tmp/tmpp6vDLb/pdisk_1.dat 2024-11-21T23:18:48.098847Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:48.144180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:48.144249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:48.146020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4815 TServer::EnableGrpc on GrpcPort 10353, node 1 2024-11-21T23:18:48.336664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:48.336688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:48.336698Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:48.336809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4815 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:48.659029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:48.672221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:48.788964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231128712 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231128852 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231128712 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231128852 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T23:18:48.816518Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:48.816742Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:48.816782Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:48.817115Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:50.561399Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231128782, tx_id: 281474976710658 } } } 2024-11-21T23:18:50.561785Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:50.563167Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:18:50.565003Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231128852 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 
InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 M ... 
024-11-21T23:18:51.264837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:51.264934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:51.266687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16384 TServer::EnableGrpc on GrpcPort 29756, node 2 2024-11-21T23:18:51.492600Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:51.492623Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:51.492631Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:51.492735Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16384 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:51.767406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:51.775838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:51.806364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231131813 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231131876 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231131813 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231131876 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) 2024-11-21T23:18:51.838869Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:51.839080Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:51.839107Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:51.839515Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:53.866310Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231131841, tx_id: 281474976715658 } } } 2024-11-21T23:18:53.866583Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:53.867960Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:18:53.868737Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" 
PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231131876 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T23:18:53.868869Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 >> DstCreator::CannotFindColumn [GOOD] >> DstCreator::UnsupportedReplicationMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2024-11-21T23:18:56.517819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876078816242679:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:56.517923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001544/r3tmp/tmpDKDHl9/pdisk_1.dat 2024-11-21T23:18:56.969321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:56.969481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:56.971081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:56.986117Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21368 TServer::EnableGrpc on GrpcPort 1241, node 1 2024-11-21T23:18:57.186985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:57.187006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:57.187017Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:57.187147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21368 WaitRootIsUp 'Root'... 
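The DstCreator::EmptyReplicationConfig and DstCreator::ColumnTypeMismatch tests above pass ([GOOD]) precisely because DstCreator refuses an unsuitable pre-existing destination table: the REPLICATION_CONTROLLER ERROR lines with status# StatusSchemeError are the expected outcomes, not failures. As a hedged illustration reconstructed from the describe output in this log (the tests create their tables through schemeshard operations, not through these statements), the ColumnTypeMismatch case corresponds to a source/destination pair like:

-- source table described by DstCreator
CREATE TABLE `/Root/Src` (
    key Uint32,
    value Utf8,
    PRIMARY KEY (key)
);

-- pre-existing destination whose `value` type differs from the source;
-- DstCreator rejects it with "Column type mismatch: name: value, expected: Utf8, got: Uint32"
CREATE TABLE `/Root/Dst` (
    key Uint32,
    value Uint32,
    PRIMARY KEY (key)
);

The EmptyReplicationConfig case is the same layout with matching column types, but the describe of /Root/Dst carries no ReplicationConfig section, which DstCreator rejects with "Empty replication config".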
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:57.527077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:57.550197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231137882 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partition... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231137588 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231137882 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2024-11-21T23:18:57.902455Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:57.902792Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:57.902815Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:57.903228Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:59.450855Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231137882, tx_id: 281474976710658 } } } 2024-11-21T23:18:59.451342Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:59.453805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2024-11-21T23:18:59.455510Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2024-11-21T23:18:59.455547Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2024-11-21T23:18:59.489052Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2024-11-21T23:18:59.490129Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231139527 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false 
IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 7 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ... 
paction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 8 PathOwnerId: 72057594046644480 } 2024-11-21T23:18:59.501678Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 8] TClient::Ls request: /Root/Dir/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { 
Self { Name: "index_by_value" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231139527 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231139527 ParentPathId: 7 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ... (TRUNCATED) TClient::Ls request: /Root/Dir/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231139527 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231139527 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> SystemView::CollectScanQueries [GOOD] Test command err: 2024-11-21T23:15:39.446861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875234520815556:2057];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:39.470635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00191e/r3tmp/tmps8mbT1/pdisk_1.dat 
2024-11-21T23:15:40.528949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:40.628030Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:40.688068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:40.688161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:40.703596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8593, node 1 2024-11-21T23:15:41.046986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:41.047006Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:41.047013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:41.047113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:15:41.622900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:41.976261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:15:42.000347Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7439875234520815499:2070], interval end# 2024-11-21T23:15:42.000000Z, event interval end# 2024-11-21T23:15:42.000000Z 2024-11-21T23:15:42.000396Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7439875234520815499:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2024-11-21T23:15:42.001096Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [1:7439875234520815686:2186], interval end# 2024-11-21T23:15:42.000000Z, event interval end# 2024-11-21T23:15:42.000000Z 2024-11-21T23:15:42.001122Z node 1 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [1:7439875234520815686:2186], query logs count# 0, processor ids count# 1, processor id to database count# 0 2024-11-21T23:15:42.006226Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [3:7439875235148978548:2061], interval end# 2024-11-21T23:15:42.000000Z, event interval end# 2024-11-21T23:15:42.000000Z 2024-11-21T23:15:42.006279Z node 3 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [3:7439875235148978548:2061], query logs count# 0, processor ids count# 0, processor id to database count# 0 2024-11-21T23:15:42.019696Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875248033880494:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:42.019755Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/PQ/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:42.050725Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [2:7439875234664116345:2061], interval end# 2024-11-21T23:15:42.000000Z, event interval end# 2024-11-21T23:15:42.000000Z 2024-11-21T23:15:42.050782Z node 2 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [2:7439875234664116345:2061], query logs count# 0, processor ids count# 0, processor id to database count# 0 2024-11-21T23:15:42.098562Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875247549018430:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:42.098875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/PQ/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:42.399402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:42.399506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 
2024-11-21T23:15:42.420720Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:15:42.463631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:42.623141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:42.623270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:42.636554Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:15:42.637661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:42.735975Z node 3 :SYSTEM_VIEWS INFO: [72075186224037895] OnActivateExecutor 2024-11-21T23:15:42.736063Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInitSchema::Execute 2024-11-21T23:15:42.767371Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7439875248033880564:2122], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T23:15:43.216604Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T23:15:43.218097Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7439875248033880564:2122], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/PQ 2024-11-21T23:15:43.218200Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInitSchema::Complete 2024-11-21T23:15:43.218243Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInit::Execute 2024-11-21T23:15:43.222799Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T23:15:43.222856Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval metrics: query count# 0 2024-11-21T23:15:43.222915Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading interval query tops: total query count# 0 2024-11-21T23:15:43.222961Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T23:15:43.223013Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 6, result count# 0 2024-11-21T23:15:43.223047Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 7, result count# 0 2024-11-21T23:15:43.223084Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 8, result count# 0 2024-11-21T23:15:43.223149Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 9, result count# 0 2024-11-21T23:15:43.223187Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 10, result count# 0 2024-11-21T23:15:43.223239Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 11, result count# 0 2024-11-21T23:15:43.223280Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 12, result count# 0 2024-11-21T23:15:43.223320Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 13, result count# 0 2024-11-21T23:15:43.223354Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 14, result count# 0 2024-11-21T23:15:43.223379Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 15, result count# 0 2024-11-21T23:15:43.223421Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 16, partCount count# 0 2024-11-21T23:15:43.223459Z 
node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 19, partCount count# 0 2024-11-21T23:15:43.223499Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 17, result count# 0 2024-11-21T23:15:43.223537Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Loading results: table# 18, result count# 0 2024-11-21T23:15:43.223693Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] Reset: interval end# 2024-11-21T23:15:43.000000Z 2024-11-21T23:15:43.223937Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:43.224082Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7439875248033880564:2122], database# /Root/PQ, no sysview processor 2024-11-21T23:15:43.824111Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxInit::Complete 2024-11-21T23:15:43.838166Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] TTxAggregate::Execute 2024-11-21T23:15:43.845008Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryResults: interval end# 2024-11-21T23:15:43.000000Z, query count# 0 2024-11-21T23:15:43.855071Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T23:15:43.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:43.863463Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T23:15:43.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:43.863498Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T23:15:43.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:43.863515Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T23:15:43.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:43.871615Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 9, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:43.871643Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 11, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:43.871660Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037895] PersistQueryTopResults: table id# 13, interval end# ... T_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15178 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:18:52.637206Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:52.643030Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:18:52.651899Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:55.285596Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439876076467834167:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:55.285596Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7439876076467834177:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:55.285750Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:55.289225Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:18:55.299368Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7439876076467834181:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:18:55.702823Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8gb01keym36jaqzhkq3nea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZDliYzU0MDUtNDQzZWE1ZGEtYjZiMDgwMTgtMWY4MDRiYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:18:55.937220Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8gb0nz9dmzj5xhfhhk64wg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTA5MmQxNDUtYzFiZDgzNDMtOGYwZWU2ZTktN2M1ZTJkNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:18:56.168282Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8gb0pccyhgmj7gespvej53, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTY3NTEyYmQtMWM5ZDc5ODEtNGM1MWEyNTQtZTRiNjU2Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:18:56.178069Z node 7 :SYSTEM_VIEWS INFO: Scan started, actor: [7:7439876080762801624:2337], owner: [7:7439876080762801620:2335], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T23:18:56.178639Z node 7 :SYSTEM_VIEWS INFO: Scan prepared, actor: [7:7439876080762801624:2337], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:18:56.194377Z node 7 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [7:7439876080762801624:2337], row count: 2, finished: 1 2024-11-21T23:18:56.194481Z node 7 :SYSTEM_VIEWS INFO: Scan finished, actor: [7:7439876080762801624:2337], owner: [7:7439876080762801620:2335], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T23:18:56.204780Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231136167, txId: 281474976715663] shutting down 2024-11-21T23:18:57.068767Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7439876084235901003:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:57.068841Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00191e/r3tmp/tmp19qu7W/pdisk_1.dat 2024-11-21T23:18:57.173917Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:57.205878Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:57.205991Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:57.208747Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9739, node 8 2024-11-21T23:18:57.253988Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:57.254072Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:57.254086Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:57.254220Z node 8 :NET_CLASSIFIER ERROR: got 
bad distributable configuration TClient is connected to server localhost:27616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:57.523795Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:57.534905Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:00.629583Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439876097120803601:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:00.629677Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7439876097120803593:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:00.629813Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:00.633522Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2024-11-21T23:19:00.643598Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7439876097120803607:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2024-11-21T23:19:00.809894Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8gb58kefxvcvset3qxy649, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=NzQ1MmQwYTQtYmQxMzhiY2QtM2VkMzlmNjktNjZmOWMxNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:19:00.918740Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8gb5evczmm757hs7my8km6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ODBkOWM0YzMtN2IyOGM0MzMtZWQ0ODkxYjYtN2QzYTkzOWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:19:00.929595Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231140962, txId: 281474976715662] shutting down 2024-11-21T23:19:01.087619Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8gb5jhb752txxkzp4bc3qk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MmRlODRlOWItYjJiNzQ2MjUtZWMxMWM3ZTMtNzk4ZDExOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:19:01.091176Z node 8 :SYSTEM_VIEWS INFO: Scan started, actor: [8:7439876101415771069:2337], owner: [8:7439876101415771065:2335], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T23:19:01.091831Z node 8 :SYSTEM_VIEWS INFO: Scan prepared, actor: [8:7439876101415771069:2337], schemeshard id: 72057594046644480, hive id: 72057594037968897, tenant name: /Root, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], tenant node count: 1 2024-11-21T23:19:01.092456Z node 8 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [8:7439876101415771069:2337], row count: 2, finished: 1 2024-11-21T23:19:01.092505Z node 8 :SYSTEM_VIEWS INFO: Scan finished, actor: [8:7439876101415771069:2337], owner: [8:7439876101415771065:2335], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2024-11-21T23:19:01.094567Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231141086, txId: 281474976715664] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2024-11-21T23:18:33.034845Z :FallbackToSingleDb INFO: Random seed for debugging is 1732231113034797 2024-11-21T23:18:33.868465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875983722083464:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:33.868518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:33.944349Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875980306076317:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:34.118887Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:34.151563Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00132e/r3tmp/tmpiubRxo/pdisk_1.dat 2024-11-21T23:18:34.182592Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:34.602092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:34.602201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:34.603362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:34.603409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:34.606297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:34.609629Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:34.610790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:34.630675Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10706, node 1 2024-11-21T23:18:34.689181Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:34.689348Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:34.872609Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/00132e/r3tmp/yandexJOlaqD.tmp 2024-11-21T23:18:34.872640Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/00132e/r3tmp/yandexJOlaqD.tmp 2024-11-21T23:18:34.872836Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/00132e/r3tmp/yandexJOlaqD.tmp 2024-11-21T23:18:34.872956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:34.957439Z INFO: TTestServer started on Port 2750 GrpcPort 10706 TClient is connected to server localhost:2750 PQClient connected to localhost:10706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:35.321620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2024-11-21T23:18:37.936857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876000901953489:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.937007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.938029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876000901953499:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.942384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:18:37.994625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876000901953503:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:38.327502Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876001780912989:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:38.328391Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Njg2ZDRiZi0xY2QwYWY3YS1jMDYxZmM5NS1lNzA1ODliNQ==, ActorId: [2:7439876001780912949:2280], ActorState: ExecuteState, TraceId: 01jd8gaf7q1kjgpn1tbk6vd5re, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:38.331134Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:38.345240Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876005196920904:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:38.345538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:38.347144Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTlmNWEyYjktZDI3ZjJlZmItNmVlNDU5ZDAtOTQxYTA5Zjg=, ActorId: [1:7439876000901953486:2303], ActorState: ExecuteState, TraceId: 01jd8gaf2z5w0h6xdab1k72nf2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:38.347559Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:38.493104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:38.648143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:10706", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:18:38.872210Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875983722083464:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:38.872305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:38.922736Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875980306076317:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:38.922812Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:38.941375Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8gafxaatrnvagxgw2fv6tr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWUyZmZkMTQtOTEyOWQ3YjItMmMyMDgwYTYtZGY0ZDIwM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439876005196921289:2977] === CheckClustersList. Ok 2024-11-21T23:18:45.061491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:10706 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:18:45.151590Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:10706 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ... CHOOSER DEBUG: TPartitionChooser [3:7439876098517385693:2477] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T23:19:00.301537Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876098517385693:2477] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T23:19:00.301821Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876098517385741:2477] connected; active server actors: 1 2024-11-21T23:19:00.301882Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876098517385693:2477] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T23:19:00.301907Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876098517385693:2477] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T23:19:00.302348Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876098517385741:2477] disconnected; active server actors: 1 2024-11-21T23:19:00.302378Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876098517385741:2477] disconnected no session 2024-11-21T23:19:00.462601Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876098517385693:2477] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T23:19:00.462648Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876098517385693:2477] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T23:19:00.462669Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876098517385693:2477] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:19:00.462701Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:19:00.463655Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T23:19:00.466115Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0 2024-11-21T23:19:00.464433Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:00.464483Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439876098517385763:2477], now have 1 active actors on pipe 2024-11-21T23:19:00.464549Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:19:00.464582Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:19:00.464677Z node 4 :PERSQUEUE INFO: new Cookie src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:19:00.464783Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:19:00.464829Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:19:00.465660Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:19:00.465684Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:19:00.465766Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:19:00.467628Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732231140467 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:00.467732Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:19:00.467943Z :INFO: [] MessageGroupId [src] SessionId [src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0] Write session: close. 
Timeout = 0 ms 2024-11-21T23:19:00.467996Z :INFO: [] MessageGroupId [src] SessionId [src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0] Write session will now close 2024-11-21T23:19:00.468056Z :DEBUG: [] MessageGroupId [src] SessionId [src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0] Write session: aborting 2024-11-21T23:19:00.468252Z :INFO: [] MessageGroupId [src] SessionId [src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:19:00.468316Z :DEBUG: [] MessageGroupId [src] SessionId [src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0] Write session: destroy 2024-11-21T23:19:00.470155Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0 grpc read done: success: 0 data: 2024-11-21T23:19:00.470180Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0 grpc read failed 2024-11-21T23:19:00.470202Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0 grpc closed 2024-11-21T23:19:00.470236Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b42c3fa3-7a90d289-5f98c7d0-cb5cac3e_0 is DEAD PORTS 24890 24053 2024-11-21T23:19:00.475771Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:19:00.482300Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:00.482360Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876098517385763:2477] destroyed 2024-11-21T23:19:00.482428Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2024-11-21T23:19:01.518829Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2024-11-21T23:19:01.518967Z :INFO: [/Root] [] [843c5ba4-b0aeeda1-50166658-1591721b] Open read subsessions to databases: { name: , endpoint: localhost:24053, path: /Root } 2024-11-21T23:19:01.519259Z :INFO: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Starting read session 2024-11-21T23:19:01.519300Z :DEBUG: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Starting single session 2024-11-21T23:19:01.520300Z :DEBUG: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T23:19:01.520339Z :DEBUG: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T23:19:01.520402Z :DEBUG: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] [] Reconnecting session to cluster in 0.000000s 2024-11-21T23:19:01.520646Z :ERROR: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:24053
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24053. 2024-11-21T23:19:01.520712Z :DEBUG: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T23:19:01.520763Z :DEBUG: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2024-11-21T23:19:01.520957Z :INFO: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:24053" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:24053
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24053. " } 2024-11-21T23:19:01.521306Z :NOTICE: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:19:01.521368Z :DEBUG: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:24053" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:24053
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:24053. " } 2024-11-21T23:19:01.521533Z :INFO: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Closing read session. Close timeout: 0.010000s 2024-11-21T23:19:01.521568Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:19:01.521603Z :INFO: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:01.521656Z :INFO: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:01.521683Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:19:01.521714Z :INFO: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:01.521753Z :INFO: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:01.521776Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:19:01.521827Z :INFO: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:01.521908Z :NOTICE: [/Root] [/Root] [6a2abd06-55ccc35a-6e20dd94-ef47c05] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:19:01.836575Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876102812353154:2505], TxId: 281474976715691, task: 1, CA Id [3:7439876102812353152:2505]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T23:19:01.866689Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876102812353154:2505], TxId: 281474976715691, task: 1, CA Id [3:7439876102812353152:2505]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:01.914258Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876102812353154:2505], TxId: 281474976715691, task: 1, CA Id [3:7439876102812353152:2505]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:01.972076Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876102812353154:2505], TxId: 281474976715691, task: 1, CA Id [3:7439876102812353152:2505]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:02.080756Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876102812353154:2505], TxId: 281474976715691, task: 1, CA Id [3:7439876102812353152:2505]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |86.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2024-11-21T23:18:56.399753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876078258138591:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:56.399818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001541/r3tmp/tmp2JaaaP/pdisk_1.dat 2024-11-21T23:18:56.685204Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:56.755247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:56.755327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:56.759378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23319 TServer::EnableGrpc on GrpcPort 65478, node 1 2024-11-21T23:18:56.974202Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:56.974223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:56.974233Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:56.974357Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:23319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:57.259275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:57.278153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231137385 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231137315 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231137385 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2024-11-21T23:18:57.439280Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:57.440085Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:57.440116Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:57.440732Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:59.301888Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231137385, tx_id: 281474976710658 } } } 2024-11-21T23:18:59.302222Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:59.303797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:18:59.304389Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2024-11-21T23:18:59.304407Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2024-11-21T23:18:59.328237Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2024-11-21T23:18:59.328283Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231139373 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: 
"Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2024-11-21T23:18:59.860321Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876092417831774:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:59.860380Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001541/r3tmp/tmpdXft2E/pdisk_1.dat 2024-11-21T23:18:59.943590Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:59.964637Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:59.964701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:59.966001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18872 TServer::EnableGrpc on GrpcPort 19230, node 2 2024-11-21T23:19:00.135101Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:00.135127Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:00.135137Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:00.135254Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:00.437003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:00.443440Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:00.446666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:00.494439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231140486 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231140563 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231140486 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231140563 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T23:19:00.529819Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:00.529956Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:00.529974Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:19:00.530309Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:19:02.786657Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231140514, tx_id: 281474976715658 } } } 2024-11-21T23:19:02.786945Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:19:02.788417Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:19:02.789601Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231140563 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T23:19:02.789854Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value >> DstCreator::WithSyncIndex |86.9%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::UnsupportedReplicationMode [GOOD] Test command err: 2024-11-21T23:18:56.499595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876079158545577:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:56.499942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00153f/r3tmp/tmpH0PQeJ/pdisk_1.dat 2024-11-21T23:18:56.937622Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:56.941296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:56.941429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:56.944589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7098 TServer::EnableGrpc on GrpcPort 61555, node 1 2024-11-21T23:18:57.153285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:57.153322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:57.153332Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:57.153463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:57.547200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:57.571252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231137889 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partition... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231137602 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231137889 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... (TRUNCATED) 2024-11-21T23:18:57.911849Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:57.912042Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:18:57.912076Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:18:57.912717Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:18:59.606854Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: 
Table, size_bytes: 0, created_at: { plan_step: 1732231137889, tx_id: 281474976710658 } } } 2024-11-21T23:18:59.607236Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:18:59.608544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:18:59.609151Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2024-11-21T23:18:59.609198Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2024-11-21T23:18:59.634903Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 TClient::Ls request: /Root/Replicated 2024-11-21T23:18:59.634928Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 5] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231139674 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2024-11-21T23:19:00.282215Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876096306942055:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00153f/r3tmp/tmpTvg2tk/pdisk_1.dat 2024-11-21T23:19:00.317462Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:00.375176Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:00.386362Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:00.386546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:00.387945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30029 TServer::EnableGrpc on GrpcPort 14088, node 2 2024-11-21T23:19:00.603581Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:00.603605Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:00.603613Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:00.603704Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30029 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:19:00.884892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:00.891206Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:00.896746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:00.968156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231140934 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231141074 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231140934 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231141074 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T23:19:01.041388Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:01.041523Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:01.041536Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:19:01.044764Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:19:03.083938Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231141004, tx_id: 281474976715658 } } } 2024-11-21T23:19:03.084267Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:19:03.085726Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:19:03.086764Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1732231141074 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE Consistency: CONSISTENCY_UNKNOWN } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T23:19:03.086999Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Unsupported replication mode |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest |86.9%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> KqpStats::JoinNoStatsYql >> KqpQueryPerf::Update+QueryService >> KqpScanSpilling::SelfJoinQueryService |86.9%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |86.9%| [LD] {RESULT} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/bsconfig/ut/ydb-services-bsconfig-ut |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |86.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> KqpStats::RequestUnitForBadRequestExplicitPrepare ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2024-11-21T23:16:25.360216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:25.360635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:16:25.360704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 8927, node 1 TClient is connected to server localhost:5944 2024-11-21T23:16:25.928433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:592:2504], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:25.928542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:602:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:25.928963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:25.952739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:25.952805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:25.952843Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:25.953020Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:25.953910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:16:26.004803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:26.004926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:26.010076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:16:26.039626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:26.154260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:606:2512], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:16:26.580507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd8g6e658kzwr0jybyjgqyr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg3NjczZTktZGUxMGRkYWEtZTQ1OWZiNmYtZDc4NWIxODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:34.801430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:296:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:34.801650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:34.801750Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 29439, node 2 TClient is connected to server localhost:12174 2024-11-21T23:16:35.665680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:590:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:35.665798Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:601:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:35.665880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:35.691528Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:35.692787Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:35.692848Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:35.692897Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:35.693098Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:16:35.743411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:35.743583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:35.748610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:16:35.769739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:35.900118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:604:2510], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:16:36.005779Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd8g6qpeayrkgc9cs4vzy2nx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjcwZWEyZTEtMTVmYWVkNzUtODJmN2IxYmItNTk5NDEyMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:45.247063Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:296:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:45.247231Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:16:45.247306Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 29561, node 3 TClient is connected to server localhost:13753 2024-11-21T23:16:45.935643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:586:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:45.935768Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:599:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:45.935879Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:16:45.965948Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:16:45.967356Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:16:45.967428Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:16:45.967496Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:16:45.967721Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:16:46.023625Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:16:46.023825Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:16:46.029773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:16:46.053230Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:16:46.190920Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:603:2509], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:16:46.312423Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd8g71q91xftx1ax0pk7cjrh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=M2M4MjI3NTItY2IyNzQzOWMtOTc0NzNjM2QtYjU3NDIwNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:16:58.764485Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:89:2135], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:16:58.764950Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:16:58.765158Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19634, node 4 TClient is connected to server localhost:30991 2024-11-21T23:17:00.519524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:590:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:00.519625Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:600:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:00.520016Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found ... -11-21T23:17:39.972728Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 27396, node 7 TClient is connected to server localhost:7199 2024-11-21T23:17:40.951891Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:599:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:40.952083Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:587:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:40.952812Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:40.998264Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:40.999912Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:41.000034Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:41.000115Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:41.000479Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:17:41.050058Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:41.050270Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:41.057407Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:17:41.080884Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:41.221480Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:603:2509], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:17:41.416053Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd8g8qek5bq1he2q6wqbzztb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGZhMDU4OTUtNGMxMGEzNmQtNDU3ZjNlMGYtYzJmYTA0Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:17:54.641327Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:288:2331], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:17:54.731289Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:54.731567Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 2560, node 8 TClient is connected to server localhost:26979 2024-11-21T23:17:55.764610Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:588:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:55.764752Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:598:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:55.764890Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:55.797538Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:55.800852Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:55.800975Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:55.801067Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:55.801623Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:17:55.852362Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:55.852594Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:55.862609Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:17:55.889736Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:56.018702Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:603:2509], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:17:56.173281Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jd8g95xgfrz1sgfj8r9aemhs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=ZTI2MjlhYzItYjVmYWJjOTEtYWJiNzczYjAtOWQ1ZTUyZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:18:13.236436Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:398:2381], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:13.237001Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:13.237260Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:18:13.876551Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:14.125056Z node 9 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T23:18:14.210548Z node 9 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T23:18:15.406569Z node 9 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 3027, node 9 TClient is connected to server localhost:20553 2024-11-21T23:18:16.464200Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:16.464341Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:16.464439Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:16.465615Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:36.675008Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:452:2382], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:36.675387Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:36.675573Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:18:37.260370Z node 11 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:37.439119Z node 11 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T23:18:37.496127Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T23:18:38.397489Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 1093, node 11 TClient is connected to server localhost:22878 2024-11-21T23:18:39.307631Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:39.307774Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:39.307876Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:39.308722Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:56.981554Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:511:2383], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:56.982099Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:56.982372Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:18:57.462056Z node 14 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:57.615558Z node 14 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T23:18:57.665537Z node 14 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T23:18:58.625110Z node 14 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 9801, node 14 TClient is connected to server localhost:30567 2024-11-21T23:18:59.400544Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:59.400676Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:59.400773Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:59.401583Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration >> KqpStats::MultiTxStatsFullExpYql >> BasicUsage::WriteSessionNoAvailableDatabase >> DstCreator::SamePartitionCount >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter >> KqpQuery::Now >> KqpScanSpilling::SpillingPragmaParseError >> KqpTypes::Time64Columns+EnableTableDatetime64 >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpLimits::CancelAfterRoTxWithFollowerLegacyDependedRead >> KqpQuery::QueryCacheTtl >> KqpExplain::Explain |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |86.9%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |86.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |86.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans >> KqpTypes::UnsafeTimestampCastV0 >> KqpJoinOrder::CanonizedJoinOrderTPCDS64+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin+ColumnStore >> DstCreator::WithSyncIndex [GOOD] |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |86.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard >> KqpJoinOrder::TPCH5+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2024-11-21T23:19:05.397356Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876120782981325:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:05.402933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001536/r3tmp/tmpTpcIyD/pdisk_1.dat 2024-11-21T23:19:05.871048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:05.871142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:05.872336Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:05.895032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3436 TServer::EnableGrpc on GrpcPort 30665, node 1 2024-11-21T23:19:06.133983Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:06.134010Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:06.134023Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:06.134128Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:06.598850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:06.619560Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:06.627596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231147024 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partition... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231146653 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231147024 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2024-11-21T23:19:07.133885Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:07.134020Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:07.134033Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:19:07.134767Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:19:09.385488Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231147024, tx_id: 281474976710658 } } } 2024-11-21T23:19:09.385905Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:19:09.388648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.390564Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2024-11-21T23:19:09.390579Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Replicated 2024-11-21T23:19:09.443667Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2024-11-21T23:19:09.444953Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231149474 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" 
TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMe ... 
erTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2024-11-21T23:19:09.459058Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls request: /Root/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 
PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231149474 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231149474 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231149474 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231149474 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_WEAK } } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> KqpQueryPerf::Update+QueryService [GOOD] >> DstCreator::SamePartitionCount [GOOD] >> DstCreator::UnsupportedReplicationConsistency >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13497, MsgBus: 15288 2024-11-21T23:19:05.996142Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876117535310290:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:05.996205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002857/r3tmp/tmprTYMgj/pdisk_1.dat 2024-11-21T23:19:06.473367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:06.473463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:06.477604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13497, node 1 2024-11-21T23:19:06.512789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:19:06.512851Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:19:06.519217Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:06.590533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:06.590555Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:06.590567Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:06.590657Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15288 TClient is connected to server localhost:15288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:07.191110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.208640Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:07.224427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.397445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:07.590027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.660288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.726372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876134715181161:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:09.740398Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:09.812886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.922898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.999419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.035475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.116620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.188838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.279910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876139010148963:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.279981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.280337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876139010148968:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.284018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:10.298308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876139010148970:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:11.002577Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876117535310290:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:11.019981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQuery::YqlSyntaxV0 >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64 >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2024-11-21T23:18:44.963125Z :BasicWriteSession INFO: Random seed for debugging is 1732231124963094 2024-11-21T23:18:45.320209Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876031818679941:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:45.323336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:45.397767Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876031344182035:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:45.398754Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012fc/r3tmp/tmpQ6YKbq/pdisk_1.dat 2024-11-21T23:18:45.598458Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:45.606280Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:45.778682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:45.778813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:45.780981Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:45.787008Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:45.787277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:45.813997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:45.814077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16870, node 1 2024-11-21T23:18:45.822097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:45.823833Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:45.877239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0012fc/r3tmp/yandex5xSYll.tmp 2024-11-21T23:18:45.877282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0012fc/r3tmp/yandex5xSYll.tmp 
2024-11-21T23:18:45.877515Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0012fc/r3tmp/yandex5xSYll.tmp 2024-11-21T23:18:45.877666Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:45.920637Z INFO: TTestServer started on Port 3306 GrpcPort 16870 TClient is connected to server localhost:3306 PQClient connected to localhost:16870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:46.286412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:18:46.395396Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2024-11-21T23:18:48.837582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876044703582791:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:48.837708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:48.853028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876044703582811:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:48.853109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876044703582813:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:48.853238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:48.856572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:18:48.879816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876044703582817:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:49.124443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:49.124768Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876044703582907:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:49.125926Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjhkOTBjMTEtMTExNDllNGItNGYxZmM4MzUtMmQ3YzJhMTY=, ActorId: [1:7439876044703582780:2302], ActorState: ExecuteState, TraceId: 01jd8gasqc6q8f7avwy0nef2r6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:49.128183Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:49.126895Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876044229084244:2286], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:49.127460Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTllMzA3YmItNGQ2NTZhZGEtMmZhNzFhZmEtYzVlYWEyY2M=, ActorId: [2:7439876044229084203:2279], ActorState: ExecuteState, TraceId: 01jd8gast71kwx5zkpgb4w1ddd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:49.128374Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:49.243203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:49.379237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16870", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:18:49.679433Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8gatb73ttrcw2dmvad3zk8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjk2M2NkODEtYWNiNWM2NDQtMmNjYjE2YzktMTYzOTUzYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439876048998550650:3020] 2024-11-21T23:18:50.322242Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876031818679941:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:50.322327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:50.398503Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876031344182035:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:50.398581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:18:55.608491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:16870 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topi ... 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:29177 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:19:09.361512Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:29177 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:19:09.866857Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2024-11-21T23:19:09.908681Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2024-11-21T23:19:09.909129Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2024-11-21T23:19:09.909168Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:29177 2024-11-21T23:19:09.963210Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T23:19:09.993076Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T23:19:09.993113Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T23:19:09.995763Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T23:19:09.995936Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:41524 2024-11-21T23:19:09.995953Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:41524 proto=v1 topic=test-topic durationSec=0 2024-11-21T23:19:09.995965Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:19:09.999749Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T23:19:09.999891Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T23:19:09.999902Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) 
VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T23:19:09.999912Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T23:19:09.999934Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876134670485329:2463] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T23:19:10.003251Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876134670485329:2463] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T23:19:10.393881Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876134670485329:2463] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T23:19:10.395368Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876138965452676:2463] connected; active server actors: 1 2024-11-21T23:19:10.395534Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876134670485329:2463] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T23:19:10.395579Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876134670485329:2463] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T23:19:10.411212Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876138965452676:2463] disconnected; active server actors: 1 2024-11-21T23:19:10.411244Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876138965452676:2463] disconnected no session 2024-11-21T23:19:10.636727Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876134670485329:2463] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T23:19:10.636775Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876134670485329:2463] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T23:19:10.636794Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876134670485329:2463] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:19:10.636855Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:19:10.637916Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T23:19:10.638579Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:10.638637Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439876138965452714:2463], now have 1 active actors on pipe 2024-11-21T23:19:10.638735Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:19:10.638764Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:19:10.638880Z node 4 :PERSQUEUE INFO: new Cookie src|44360879-530a5337-e0233d70-feb93d5c_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:19:10.638986Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:19:10.639045Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:19:10.646557Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:19:10.646609Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:19:10.646744Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:19:10.647170Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|44360879-530a5337-e0233d70-feb93d5c_0 2024-11-21T23:19:10.649667Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732231150649 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:10.649783Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|44360879-530a5337-e0233d70-feb93d5c_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:19:10.649995Z :INFO: [] MessageGroupId [src] SessionId [src|44360879-530a5337-e0233d70-feb93d5c_0] Write session: close. 
Timeout = 0 ms 2024-11-21T23:19:10.650023Z :INFO: [] MessageGroupId [src] SessionId [src|44360879-530a5337-e0233d70-feb93d5c_0] Write session will now close 2024-11-21T23:19:10.650058Z :DEBUG: [] MessageGroupId [src] SessionId [src|44360879-530a5337-e0233d70-feb93d5c_0] Write session: aborting 2024-11-21T23:19:10.650480Z :INFO: [] MessageGroupId [src] SessionId [src|44360879-530a5337-e0233d70-feb93d5c_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:19:10.650514Z :DEBUG: [] MessageGroupId [src] SessionId [src|44360879-530a5337-e0233d70-feb93d5c_0] Write session: destroy 2024-11-21T23:19:10.652048Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|44360879-530a5337-e0233d70-feb93d5c_0 grpc read done: success: 0 data: 2024-11-21T23:19:10.652074Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|44360879-530a5337-e0233d70-feb93d5c_0 grpc read failed 2024-11-21T23:19:10.652093Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|44360879-530a5337-e0233d70-feb93d5c_0 grpc closed 2024-11-21T23:19:10.652110Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|44360879-530a5337-e0233d70-feb93d5c_0 is DEAD 2024-11-21T23:19:10.654000Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:19:10.654858Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:10.654900Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876138965452714:2463] destroyed 2024-11-21T23:19:10.654951Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created 2024-11-21T23:19:11.885213Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876143260420086:2487], TxId: 281474976715687, task: 1, CA Id [3:7439876143260420084:2487]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T23:19:11.934096Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876143260420086:2487], TxId: 281474976715687, task: 1, CA Id [3:7439876143260420084:2487]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:12.003927Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876143260420086:2487], TxId: 281474976715687, task: 1, CA Id [3:7439876143260420084:2487]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:12.071382Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876143260420086:2487], TxId: 281474976715687, task: 1, CA Id [3:7439876143260420084:2487]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:12.202523Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876143260420086:2487], TxId: 281474976715687, task: 1, CA Id [3:7439876143260420084:2487]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> BasicUsage::ReadMirrored [GOOD] >> KqpScanSpilling::SelfJoinQueryService [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2024-11-21T23:18:41.737314Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1732231121737281 2024-11-21T23:18:42.126241Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876020566801773:2193];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:42.127072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:42.185754Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876019355006276:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:42.185806Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:42.388665Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:42.395615Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001305/r3tmp/tmp4N059i/pdisk_1.dat 2024-11-21T23:18:42.560753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:42.560913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:42.564724Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:42.577630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9100, node 1 2024-11-21T23:18:42.593155Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:42.598185Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:42.605745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:42.605817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:42.609988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:42.617100Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:42.628674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001305/r3tmp/yandexrutFaQ.tmp 2024-11-21T23:18:42.628703Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001305/r3tmp/yandexrutFaQ.tmp 2024-11-21T23:18:42.628978Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001305/r3tmp/yandexrutFaQ.tmp 2024-11-21T23:18:42.629061Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:42.669640Z INFO: TTestServer started on Port 29335 
GrpcPort 9100 TClient is connected to server localhost:29335 PQClient connected to localhost:9100 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:42.916127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:18:45.489088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876033451704477:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.489226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.490177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876033451704489:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.500116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876032239908461:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.500193Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876032239908474:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.500254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.501225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:18:45.524593Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2024-11-21T23:18:45.535360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876033451704491:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:45.535542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876032239908482:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:45.815737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:45.864451Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876032239908525:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:45.864735Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzJlOTMzYWMtZmRlZjBjYTEtOWU3YmYyNzAtYzZkNzc2NWQ=, ActorId: [2:7439876032239908443:2280], ActorState: ExecuteState, TraceId: 01jd8gapfr8t4cxxdyhdkh6f45, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:45.866722Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:45.870928Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876033451704606:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:45.872330Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjkyYmUwY2MtNTFjZmJjYjQtOGU3MjVjNzktYjIyYmE5ZmU=, ActorId: [1:7439876033451704474:2302], ActorState: ExecuteState, TraceId: 01jd8gapen29s74xckx6dmpjrr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:45.872719Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:45.958032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:46.208579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:9100", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:18:46.705353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8gaqbrf5fqbj6wjbkf3njw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmVlMzVlYjItNjg5YTNhN2ItNTljM2ZkN2MtYTM2MmY1M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439876037746672346:3025] 2024-11-21T23:18:47.119594Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876020566801773:2193];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:47.119670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:47.186248Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:74398760193550062 ... 
Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:11872 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: 2024-11-21T23:19:07.071983Z node 3 :PERSQUEUE INFO: proxy answer Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:11872 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:19:07.588588Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2024-11-21T23:19:07.606569Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2024-11-21T23:19:07.608667Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2024-11-21T23:19:07.608707Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:11872 2024-11-21T23:19:07.617371Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T23:19:07.619391Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2024-11-21T23:19:07.619423Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2024-11-21T23:19:07.620616Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2024-11-21T23:19:07.620714Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:45312 2024-11-21T23:19:07.620731Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:45312 proto=v1 topic=test-topic durationSec=0 2024-11-21T23:19:07.620739Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2024-11-21T23:19:07.623677Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2024-11-21T23:19:07.623795Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2024-11-21T23:19:07.623806Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2024-11-21T23:19:07.623815Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2024-11-21T23:19:07.623832Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876128662227711:2473] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2024-11-21T23:19:07.643058Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876128662227711:2473] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2024-11-21T23:19:07.829046Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876128662227711:2473] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2024-11-21T23:19:07.834706Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876128662227750:2473] connected; active server actors: 1 2024-11-21T23:19:07.834829Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876128662227711:2473] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2024-11-21T23:19:07.834854Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876128662227711:2473] (SourceId=src, PreferedPartition=(NULL)) Update the table 2024-11-21T23:19:07.866270Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876128662227750:2473] disconnected; active server actors: 1 
2024-11-21T23:19:07.866318Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876128662227750:2473] disconnected no session 2024-11-21T23:19:08.073931Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876128662227711:2473] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2024-11-21T23:19:08.073971Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876128662227711:2473] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2024-11-21T23:19:08.073990Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876128662227711:2473] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:19:08.074022Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:19:08.075676Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:08.075722Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439876132957195086:2473], now have 1 active actors on pipe 2024-11-21T23:19:08.076087Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T23:19:08.076335Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:19:08.076368Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:19:08.076456Z node 4 :PERSQUEUE INFO: new Cookie src|b83bedf0-d680142-257cc792-3d061f58_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:19:08.076559Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:19:08.076637Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:19:08.077325Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:19:08.077353Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:19:08.077428Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:19:08.077854Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|b83bedf0-d680142-257cc792-3d061f58_0 2024-11-21T23:19:08.079473Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732231148079 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:08.079640Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|b83bedf0-d680142-257cc792-3d061f58_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:19:08.079864Z :INFO: [] MessageGroupId [src] SessionId [src|b83bedf0-d680142-257cc792-3d061f58_0] Write session: close. 
Timeout = 0 ms 2024-11-21T23:19:08.079922Z :INFO: [] MessageGroupId [src] SessionId [src|b83bedf0-d680142-257cc792-3d061f58_0] Write session will now close 2024-11-21T23:19:08.079969Z :DEBUG: [] MessageGroupId [src] SessionId [src|b83bedf0-d680142-257cc792-3d061f58_0] Write session: aborting 2024-11-21T23:19:08.080442Z :INFO: [] MessageGroupId [src] SessionId [src|b83bedf0-d680142-257cc792-3d061f58_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:19:08.080478Z :DEBUG: [] MessageGroupId [src] SessionId [src|b83bedf0-d680142-257cc792-3d061f58_0] Write session: destroy 2024-11-21T23:19:08.094549Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|b83bedf0-d680142-257cc792-3d061f58_0 grpc read done: success: 0 data: 2024-11-21T23:19:08.094576Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b83bedf0-d680142-257cc792-3d061f58_0 grpc read failed 2024-11-21T23:19:08.094607Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b83bedf0-d680142-257cc792-3d061f58_0 grpc closed 2024-11-21T23:19:08.094628Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|b83bedf0-d680142-257cc792-3d061f58_0 is DEAD 2024-11-21T23:19:08.096639Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:19:08.100193Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:08.100244Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876132957195086:2473] destroyed 2024-11-21T23:19:08.100293Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2024-11-21T23:19:08.172016Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. 
Description: 2024-11-21T23:19:11.044440Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:19:11.044470Z node 3 :IMPORT WARN: Table profiles were not loaded >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/dl5p/00100b/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 17595, MsgBus: 5906 2024-11-21T23:19:06.007164Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876123149525053:2122];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:06.012759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00100b/r3tmp/tmpGkzKYo/pdisk_1.dat 2024-11-21T23:19:06.405374Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17595, node 1 2024-11-21T23:19:06.438486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:06.443836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:06.462387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:06.542425Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:06.542442Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:06.542446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:06.542506Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5906 TClient is connected to server localhost:5906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:19:07.249252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.299850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.478052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.759691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.855410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.898523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876136034428556:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:09.957222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.183560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.233240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.336593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.369991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.414417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.488397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.601660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876140329396354:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.601747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.602154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876140329396359:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.607368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:10.630252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876140329396361:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:11.051095Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876123149525053:2122];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:11.051596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 (StructType '('"Key" $3) '('"Value" $4))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($20) (block '( (let $21 (lambda '($22) (block '( (let $23 (VariantType (TupleType $5 $5))) (let $24 (Variant $22 '0 $23)) (let $25 (Variant $22 '1 $23)) (return $24 $25) )))) (return (FromFlow (MultiMap (ToFlow $20) $21))) ))) '('('"_logical_id" '688) '('"_id" '"597c3310-57f0546f-602736b3-cbe9d85c")))) (let $7 (DqCnUnionAll (TDqOutput $6 '1))) (let $8 '('('"_logical_id" '531) '('"_id" '"3849372-3925b8ea-89eb53a-54623493") '('"_wide_channels" $5))) (let $9 (DqPhyStage '($7) (lambda '($26) (block '( (let $27 (lambda '($28) (Member $28 '"Key") (Member $28 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $26) $27))) ))) $8)) (let $10 (DqCnMap (TDqOutput $6 '0))) (let $11 (DqCnBroadcast (TDqOutput $9 '0))) (let $12 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $13 '('('"_logical_id" '603) '('"_id" '"27d04637-8f716c61-6bfe456e-116d08ce") '('"_wide_channels" $12))) (let $14 (DqPhyStage '($10 $11) (lambda '($29 $30) (block '( (let $31 (lambda '($38) (block '( (let $39 (Member $38 '"Value")) (return (Member $38 '"Key") $39 $39 (Exists $39)) )))) (let $32 (lambda '($44 $45 $46 $47) $44 $45 $46)) (let $33 (lambda '($50 $51) $50 $51 $51)) (let $34 '('"2")) (let $35 '('0 '0 '1 '1)) (let $36 '('0 '"2" '1 '"3")) (let $37 (GraceJoinCore (WideMap (WideFilter (ExpandMap (ToFlow $29) $31) (lambda '($40 $41 $42 $43) $43)) $32) (WideMap (WideFilter (ToFlow $30) (lambda '($48 $49) (Exists $49))) $33) 'Inner $34 $34 $35 $36 '('"t1.Value") '('"t2.Value") '('"Broadcast"))) (return (FromFlow (WideSort $37 '('('0 (Bool 'true)))))) ))) $13)) (let $15 (DqCnMerge (TDqOutput $14 '0) '('('0 '"Asc")))) (let $16 (DqPhyStage '($15) (lambda '($52) (FromFlow (NarrowMap (ToFlow $52) (lambda '($53 $54 $55 $56) (AsStruct '('"t1.Key" $53) '('"t1.Value" $54) '('"t2.Key" $55) '('"t2.Value" $56)))))) '('('"_logical_id" '615) '('"_id" '"1331ad7a-5e043cd5-d50a6cad-9bec9c00")))) (let $17 '($6 $9 $14 $16)) (let $18 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $19 (DqCnResult (TDqOutput $16 '0) $18)) (return (KqpPhysicalQuery '((KqpPhysicalTx $17 '($19) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $12) '0 '0)) '('('"type" '"query")))) ) >> KqpScanSpilling::SpillingPragmaParseError [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> KqpQuery::Now [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore [GOOD] >> KqpExplain::SortStage ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2024-11-21T23:18:33.037747Z :PropagateSessionClosed INFO: Random seed for debugging is 
1732231113037713 2024-11-21T23:18:33.737676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875982262667072:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:33.737766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:33.909823Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875980189705280:2105];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:34.136077Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:34.136375Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:34.135918Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001327/r3tmp/tmp1lAj2q/pdisk_1.dat 2024-11-21T23:18:34.404232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:34.404342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:34.406122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:34.406174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:34.411739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:34.412824Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:34.420020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:34.491596Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10649, node 1 2024-11-21T23:18:34.701130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001327/r3tmp/yandex9A7zsC.tmp 2024-11-21T23:18:34.701162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/001327/r3tmp/yandex9A7zsC.tmp 2024-11-21T23:18:34.701344Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001327/r3tmp/yandex9A7zsC.tmp 2024-11-21T23:18:34.701474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:34.782283Z INFO: TTestServer started on Port 28149 GrpcPort 10649 TClient is connected to server localhost:28149 PQClient connected to localhost:10649 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:35.088782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:18:37.916434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875999442537258:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.916439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875997369574704:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.916573Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875997369574723:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.916667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.917164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875999442537251:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.917253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.923345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:18:37.934404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875999442537325:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.934581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:37.950646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875997369574725:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:37.958082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875999442537280:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:38.198583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:38.201690Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876003737504684:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:38.202407Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWJiMjZjYmUtYWIwZDllMWEtYWY2YTI2YzUtY2Y1MDY2MzI=, ActorId: [1:7439875999442537248:2303], ActorState: ExecuteState, TraceId: 01jd8gaf2e7yegbch75vnh0nah, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:38.204564Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:38.205803Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876001664542064:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:38.207621Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmY5NDE1MjYtY2NkYzcyYjgtOGUyMWJiZjMtYTFlZmM1MzE=, ActorId: [2:7439875997369574694:2280], ActorState: ExecuteState, TraceId: 01jd8gaf2sfrb8fay37cbmy0h6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:38.208134Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:38.353581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:38.490062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:10649", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:18:38.714997Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875982262667072:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:38.715057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:38.870258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8gafsw3jvsyr3tr1cmq38y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmY5NGVhOGMtZWM3OTVlYjgtZjkyNWIyNjEtYThjZjc1M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T2 ... ": "ipv6:[::1]:47042", "_ip": "ipv6:[::1]:47042" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2024-11-21T23:19:11.803000Z WriteTime: 2024-11-21T23:19:11.877000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:47042", "_ip": "ipv6:[::1]:47042" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 3 messages in this event 2024-11-21T23:19:12.008034Z :DEBUG: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] [] The application data is transferred to the client. 
Number of messages 3, size 1050 bytes 2024-11-21T23:19:12.008076Z :DEBUG: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] [] Returning serverBytesSize = 0 to budget 2024-11-21T23:19:12.009470Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_12702276641153886616_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1403 } } 2024-11-21T23:19:12.009668Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_12702276641153886616_v1 got read request: guid# 8411c7c9-e299c6a7-bd44e7d6-b6f0134e 2024-11-21T23:19:12.016038Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|8d65f14-6631c598-1ac4113e-3d301170_0] Write session will now close 2024-11-21T23:19:12.016115Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|8d65f14-6631c598-1ac4113e-3d301170_0] Write session: aborting 2024-11-21T23:19:12.016736Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|8d65f14-6631c598-1ac4113e-3d301170_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2024-11-21T23:19:12.016787Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|8d65f14-6631c598-1ac4113e-3d301170_0] Write session: destroy 2024-11-21T23:19:12.016967Z :INFO: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] Closing read session. Close timeout: 18446744073709.551615s 2024-11-21T23:19:12.017039Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2024-11-21T23:19:12.017082Z :INFO: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 776 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:12.022926Z :INFO: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:12.023015Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2024-11-21T23:19:12.034659Z :INFO: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 782 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:12.034766Z :INFO: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:12.034839Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc2:0:1:4:0 2024-11-21T23:19:12.036568Z :INFO: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 794 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:12.036720Z :NOTICE: [/Root] [/Root] [172b5b64-1d9cad7c-bd1aa206-d31908e8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:19:12.042911Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|8d65f14-6631c598-1ac4113e-3d301170_0 grpc read done: success: 0 data: 2024-11-21T23:19:12.042944Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|8d65f14-6631c598-1ac4113e-3d301170_0 grpc read failed 2024-11-21T23:19:12.042975Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|8d65f14-6631c598-1ac4113e-3d301170_0 grpc closed 2024-11-21T23:19:12.042999Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|8d65f14-6631c598-1ac4113e-3d301170_0 is DEAD 2024-11-21T23:19:12.043907Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:19:12.044067Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_12702276641153886616_v1 grpc read done: success# 0, data# { } 2024-11-21T23:19:12.044086Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_12702276641153886616_v1 grpc read failed 2024-11-21T23:19:12.044108Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_12702276641153886616_v1 grpc closed 2024-11-21T23:19:12.044174Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_12702276641153886616_v1 is DEAD 2024-11-21T23:19:12.048099Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7439876145187709118:2564] disconnected; active server actors: 1 2024-11-21T23:19:12.048148Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7439876145187709118:2564] client user disconnected session shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.048231Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876145187709119:2564] disconnected; active server actors: 1 2024-11-21T23:19:12.048248Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876145187709119:2564] client user disconnected session shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.048298Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7439876145187709120:2564] disconnected; active server actors: 1 2024-11-21T23:19:12.048313Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7439876145187709120:2564] client user disconnected session shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.060547Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:12.060608Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7439876145187709266:2584] destroyed 2024-11-21T23:19:12.060632Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:12.060653Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.060677Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7439876145187709132:2570] destroyed 2024-11-21T23:19:12.060701Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:12.060716Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy 
direct read session shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.060738Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7439876145187709135:2572] destroyed 2024-11-21T23:19:12.060758Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:12.060773Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.060790Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876145187709134:2571] destroyed 2024-11-21T23:19:12.060841Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.060861Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.060879Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_12702276641153886616_v1 2024-11-21T23:19:12.061623Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:19:12.449812Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:19:13.074725Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876153777643948:2596], TxId: 281474976715707, task: 1, CA Id [3:7439876153777643946:2596]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T23:19:13.114579Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876153777643948:2596], TxId: 281474976715707, task: 1, CA Id [3:7439876153777643946:2596]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:13.208648Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876153777643948:2596], TxId: 281474976715707, task: 1, CA Id [3:7439876153777643946:2596]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:13.285610Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876153777643948:2596], TxId: 281474976715707, task: 1, CA Id [3:7439876153777643946:2596]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:13.419294Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876153777643948:2596], TxId: 281474976715707, task: 1, CA Id [3:7439876153777643946:2596]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:13.540163Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876153777643948:2596], TxId: 281474976715707, task: 1, CA Id [3:7439876153777643946:2596]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:13.684187Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715708. Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:19:13.684350Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7439876153777644004:2595] TxId: 281474976715708. Ctx: { TraceId: 01jd8gbhddamff12zx3y1g05xy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDQ4ZDUzMDYtNDcwN2VkMmEtY2EyNmE5ZWQtNDVkZmVhMDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2024-11-21T23:19:13.686384Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDQ4ZDUzMDYtNDcwN2VkMmEtY2EyNmE5ZWQtNDVkZmVhMDg=, ActorId: [3:7439876153777643935:2595], ActorState: ExecuteState, TraceId: 01jd8gbhddamff12zx3y1g05xy, Create QueryResponse for error on request, msg: 2024-11-21T23:19:13.687889Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jd8gbhvn2p4f3w3h3kz6fyq3" } } YdbStatus: UNAVAILABLE ConsumedRu: 299 } 2024-11-21T23:19:13.775933Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876153777643948:2596], TxId: 281474976715707, task: 1, CA Id [3:7439876153777643946:2596]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> KqpExplain::Explain [GOOD] >> KqpExplain::ComplexJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/dl5p/000fe5/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 5675, MsgBus: 64309 2024-11-21T23:19:08.149046Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876130077007826:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:08.149093Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000fe5/r3tmp/tmp59zc8t/pdisk_1.dat 2024-11-21T23:19:08.936345Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:08.949102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:08.949209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:08.964202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5675, node 1 2024-11-21T23:19:09.204649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:09.204669Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:09.204678Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:09.204757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64309 TClient is connected to server localhost:64309 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:10.047650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.118424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.476007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.688456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.784721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:13.191187Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876130077007826:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:13.198658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:13.683753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876151551846031:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.683887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.008505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.043461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.080690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.119999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.161919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.231507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.353960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876155846813840:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.354034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.354216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876155846813845:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.358822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:14.371081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876155846813847:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:15.826171Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876160141781479:2470], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2024-11-21T23:19:15.828350Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjk0MmViNjktZGZhOTliN2MtOTdlNTVkZjQtZTQ4YzVjNGU=, ActorId: [1:7439876160141781472:2466], ActorState: ExecuteState, TraceId: 01jd8gbm1hdw7p8tgj59kwektx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQuery::QueryTimeout >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> DataShardWrite::UpsertPrepared+Volatile >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> DataShardWrite::UpsertImmediate >> KqpQuery::QueryResultsTruncated >> DataShardWrite::WriteImmediateBadRequest >> DstCreator::UnsupportedReplicationConsistency [GOOD] >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |87.0%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCachePermissionsLoss ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 14121, MsgBus: 16633 2024-11-21T23:17:35.478104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875731798282963:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:35.485475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f6e/r3tmp/tmpXDOfYi/pdisk_1.dat 2024-11-21T23:17:36.104566Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:36.108487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:36.108600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:36.114890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14121, node 1 2024-11-21T23:17:36.194854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:36.194881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:36.194891Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:36.194990Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16633 TClient is connected to server localhost:16633 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:36.785664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.811260Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:36.820800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:36.984677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:37.145882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:37.233038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.517855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875748978153847:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.518277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.605513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.639634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.678172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.748947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.791087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.887431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.942087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875748978154351:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.942173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875748978154356:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.942181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:39.946419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:39.967609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875748978154358:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:40.477941Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875731798282963:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:40.478010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:41.088377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.123776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.153701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.207652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.242192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.401031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.439850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.479334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.526040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.564920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.637155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.687372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.746772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.663630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T23:17:42.713587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.746231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.799334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.839517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.871521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... access permissions } 2024-11-21T23:19:05.405803Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:05.431741Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876119940950225:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:07.501361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:07.607818Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:19:07.699742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:19:07.751816Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:19:07.807090Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T23:19:08.054581Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T23:19:08.108830Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T23:19:08.196182Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T23:19:08.248855Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T23:19:08.302598Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T23:19:08.365080Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T23:19:08.446879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T23:19:08.497250Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.448459Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T23:19:09.530524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.619601Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.676143Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.758926Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.837872Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T23:19:09.907029Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.018241Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.143326Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.280157Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.389055Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.540561Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.651911Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.734256Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.831266Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T23:19:10.923951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.044178Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.158428Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.248258Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.331989Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.408367Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.509456Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.619602Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.701451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.112211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T23:19:12.170230Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.309283Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.363889Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.500144Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.596414Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.691297Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.792699Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.885947Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.113183Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 
2024-11-21T23:19:13.205544Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.297316Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.400002Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.687174Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:19:14.687216Z node 5 :IMPORT WARN: Table profiles were not loaded >> TColumnShardTestSchema::ForgetWithLostAnswer |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |87.0%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/async_io/ut/ydb-library-yql-providers-solomon-async_io-ut |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |87.0%| [LD] {RESULT} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/actors/common/ut/ydb-library-yql-dq-actors-common-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::UnsupportedReplicationConsistency [GOOD] Test command err: 2024-11-21T23:19:07.811016Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876128928678326:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:07.819710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00148f/r3tmp/tmpVfP8O8/pdisk_1.dat 2024-11-21T23:19:08.504714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:08.504803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:08.508011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:08.587032Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17866 TServer::EnableGrpc on GrpcPort 4154, node 1 2024-11-21T23:19:09.036100Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:09.036122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:09.036129Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:09.036204Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:09.601755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.625809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231149754 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231149663 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231149754 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 7205759404664... 
(TRUNCATED) 2024-11-21T23:19:09.789436Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:09.789576Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:09.789588Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:19:09.793783Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:19:12.248996Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231149754, tx_id: 281474976710658 } } } 2024-11-21T23:19:12.249415Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:19:12.266188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.267787Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2024-11-21T23:19:12.267802Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Table 2024-11-21T23:19:12.348218Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2024-11-21T23:19:12.348245Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231149754 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 
Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 Partitio... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231152365 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 ... (TRUNCATED) 2024-11-21T23:19:13.509515Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876152522081868:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:13.509570Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00148f/r3tmp/tmpA5Yw2u/pdisk_1.dat 2024-11-21T23:19:13.744115Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:13.807622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:13.807699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:13.816225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7209 TServer::EnableGrpc on GrpcPort 7528, node 2 2024-11-21T23:19:14.190992Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:14.191014Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:14.191020Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:14.191111Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:14.553825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:14.570939Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:14.580474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.663642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231154605 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231154745 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231154605 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231154745 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "Src" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPa... 
(TRUNCATED) 2024-11-21T23:19:14.716196Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:14.716285Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) } } 2024-11-21T23:19:14.716294Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2024-11-21T23:19:14.718865Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2024-11-21T23:19:18.514745Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876152522081868:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:18.514853Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:18.794902Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1732231154696, tx_id: 281474976710658 } } } 2024-11-21T23:19:18.795170Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2024-11-21T23:19:18.796650Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2024-11-21T23:19:18.797656Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1732231154745 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } } Table { Name: "Dst" Columns { Name: "key" Type: 
"Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY Consistency: CONSISTENCY_STRONG } } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 
DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2024-11-21T23:19:18.797896Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Unsupported replication consistency: 1 >> KqpStats::MultiTxStatsFullYql >> KqpQuery::DdlInDataQuery >> KqpLimits::QueryExecTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 21411, MsgBus: 24687 2024-11-21T23:19:08.265344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876131545208097:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:08.265407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028c6/r3tmp/tmpV248dy/pdisk_1.dat 2024-11-21T23:19:08.817328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:08.817416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:08.820305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:08.866119Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21411, node 1 2024-11-21T23:19:09.079167Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:09.079189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:09.079196Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:09.079286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24687 TClient is connected to server localhost:24687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:09.810287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:13.008574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876148725077696:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.008691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.266705Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876131545208097:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:13.266772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:13.365564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.600841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876153020045095:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.602713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.610548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876153020045100:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.618746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:19:13.630787Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2024-11-21T23:19:13.631515Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876153020045102:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 28051, MsgBus: 12426 2024-11-21T23:19:15.056532Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876160477822483:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028c6/r3tmp/tmp6wgVNp/pdisk_1.dat 2024-11-21T23:19:15.098825Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:15.148860Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:15.175815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:15.175911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:15.177612Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28051, node 2 2024-11-21T23:19:15.273656Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:15.273685Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:15.273694Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:15.273804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12426 TClient is connected to server localhost:12426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:16.194800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:16.206903Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:19.409672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876177657692141:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:19.409881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |87.0%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |87.0%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpQuery::UdfTerminate >> KqpLimits::ManyPartitions >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> KqpStats::SysViewClientLost [FAIL] >> KqpStats::SysViewCancelled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 13144, MsgBus: 20455 2024-11-21T23:17:48.193818Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875789162769289:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:48.193876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00290f/r3tmp/tmpJSbW3K/pdisk_1.dat 2024-11-21T23:17:48.691169Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:48.734381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:48.734494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:48.774074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13144, node 1 2024-11-21T23:17:48.870152Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:48.870180Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:48.870186Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:48.870286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20455 TClient is connected to server localhost:20455 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:49.571825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:49.595232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:49.605163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:49.759769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:49.918205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:50.000069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:51.731386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875802047672698:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:51.731510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:51.980613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:52.007313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:52.047050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:52.072647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:52.114178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:52.186191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:52.233455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875806342640489:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:52.233533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:52.233722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875806342640494:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:52.237058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:52.251640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875806342640496:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:53.174679Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875789162769289:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:53.174750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:53.265425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:54.645255Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439875814932575974:2496] TxId: 281474976710672. Ctx: { TraceId: 01jd8g94h3c3grwr2x6257g02y, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ1ZTMxMTQtNjFiYTBiZjUtMjgyYzRhNjQtYzU5YjQ3MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. } 2024-11-21T23:17:54.664739Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439875814932575987:2510], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NjQ1ZTMxMTQtNjFiYTBiZjUtMjgyYzRhNjQtYzU5YjQ3MTc=. TraceId : 01jd8g94h3c3grwr2x6257g02y. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7439875814932575974:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T23:17:54.665657Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439875814932575982:2506], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ1ZTMxMTQtNjFiYTBiZjUtMjgyYzRhNjQtYzU5YjQ3MTc=. CustomerSuppliedId : . TraceId : 01jd8g94h3c3grwr2x6257g02y. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439875814932575974:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T23:17:54.669319Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439875814932575985:2508], TxId: 281474976710672, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjQ1ZTMxMTQtNjFiYTBiZjUtMjgyYzRhNjQtYzU5YjQ3MTc=. TraceId : 01jd8g94h3c3grwr2x6257g02y. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439875814932575974:2496], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T23:17:54.672187Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjQ1ZTMxMTQtNjFiYTBiZjUtMjgyYzRhNjQtYzU5YjQ3MTc=, ActorId: [1:7439875814932575956:2496], ActorState: ExecuteState, TraceId: 01jd8g94h3c3grwr2x6257g02y, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 16286, MsgBus: 30306 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00290f/r3tmp/tmpn9Kfgn/pdisk_1.dat 2024-11-21T23:17:55.500143Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:17:55.500644Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:55.532628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:55.532717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16286, node 2 2024-11-21T23:17:55.535595Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:17:55.586987Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:55.587018Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:55.587025Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:55.587123Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30306 TClient is connected to server localhost:30306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 ... /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:59.013244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439875836393633093:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:59.013280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:59.016378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:59.024820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439875836393633095:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:00.363410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:10.472960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:10.472991Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:49.758225Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzRlZTA4NDgtYjE1MTU2YWUtZDQ2OGViNzQtMmU5NmZiMzg=, ActorId: [2:7439876051141999722:2722], ActorState: ExecuteState, TraceId: 01jd8gathdd6smhn6byms8dxef, Create QueryResponse for error on request, msg:
: Error: Task execution timeout 96ms exceeded, terminating after 101ms 2024-11-21T23:18:49.944097Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439876051141999810:2722] TxId: 281474976715674. Ctx: { TraceId: 01jd8gatnv4945j0na38rdathf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzRlZTA4NDgtYjE1MTU2YWUtZDQ2OGViNzQtMmU5NmZiMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 156ms during execution } ] 2024-11-21T23:18:49.945150Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439876051141999828:2754], TxId: 281474976715674, task: 9. Ctx: { SessionId : ydb://session/3?node_id=2&id=MzRlZTA4NDgtYjE1MTU2YWUtZDQ2OGViNzQtMmU5NmZiMzg=. TraceId : 01jd8gatnv4945j0na38rdathf. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439876051141999810:2722], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:18:49.992522Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7439876051141999825:2752], TxId: 281474976715674, task: 7. Ctx: { SessionId : ydb://session/3?node_id=2&id=MzRlZTA4NDgtYjE1MTU2YWUtZDQ2OGViNzQtMmU5NmZiMzg=. CustomerSuppliedId : . TraceId : 01jd8gatnv4945j0na38rdathf. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7439876051141999810:2722], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:18:49.995980Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzRlZTA4NDgtYjE1MTU2YWUtZDQ2OGViNzQtMmU5NmZiMzg=, ActorId: [2:7439876051141999722:2722], ActorState: ExecuteState, TraceId: 01jd8gatnv4945j0na38rdathf, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 156ms during execution Trying to start YDB, gRPC: 22928, MsgBus: 21023 2024-11-21T23:18:51.129457Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876057688672213:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:51.129517Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00290f/r3tmp/tmpg5Jmwf/pdisk_1.dat 2024-11-21T23:18:51.231152Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22928, node 3 2024-11-21T23:18:51.267835Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:51.267930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:51.269301Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:51.321274Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:51.321299Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:51.321310Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:51.321434Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21023 TClient is connected to server localhost:21023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:51.834637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:51.844872Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:18:51.853095Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:51.929777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:52.114500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:52.198882Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:54.446979Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876070573575810:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:54.447095Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:54.487557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:54.517673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:54.544429Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:54.612081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:54.646050Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:54.715229Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:54.751695Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876070573576309:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:54.751789Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:54.751945Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876070573576314:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:54.754994Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:54.763942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876070573576316:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:56.129578Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876057688672213:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:56.129673Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:06.223060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:19:06.223088Z node 3 :IMPORT WARN: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=3&id=NGVjNmU5MjItOTI5NjAzY2ItNWRlYzkxYjMtY2E2NjM3Zg== >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> KqpQueryPerf::IndexUpsert+QueryService >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::Decimal+QueryService >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber >> KqpJoinOrder::TPCH8+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> KqpExplain::ComplexJoin [GOOD] >> KqpExplain::CompoundKeyRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::UnsafeTimestampCastV1 [GOOD] Test command err: Trying to start YDB, gRPC: 15449, MsgBus: 17572 2024-11-21T23:19:09.618495Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876137331766573:2199];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:09.619107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028b9/r3tmp/tmpcmUUxW/pdisk_1.dat 2024-11-21T23:19:10.241634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:10.241734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:10.243332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:10.248497Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15449, node 1 2024-11-21T23:19:10.487845Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:10.487867Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:10.487874Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:10.487977Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17572 TClient is connected to server localhost:17572 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:11.440439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:11.493742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:11.690674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:11.923647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:12.109907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:14.646813Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876137331766573:2199];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:14.669134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:14.697815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876158806604584:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.697930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:15.042915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:15.147952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:15.175833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:15.252396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:15.299677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:15.375833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:15.482713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876163101572388:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:15.482824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:15.483367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876163101572393:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:15.488304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:15.508757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876163101572395:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:18.189058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 Trying to start YDB, gRPC: 26056, MsgBus: 61518 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028b9/r3tmp/tmp1XYCMB/pdisk_1.dat 2024-11-21T23:19:20.332624Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876182041624925:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:20.332937Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:20.418500Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:20.433427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:20.433515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:20.440473Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26056, node 2 2024-11-21T23:19:20.630994Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:20.631019Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:20.631028Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:20.631128Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61518 TClient is connected to server localhost:61518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:19:21.474065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.508200Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:21.521412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:21.638107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:19:21.851432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.955528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:24.478909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876199221495672:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:24.479033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:24.533242Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.630970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.711412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.800766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.893306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.990228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.092071Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876182041624925:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:25.092125Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:25.120645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876203516463479:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.120738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.120993Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876203516463484:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.124640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:25.150689Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876203516463486:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:27.095186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.197387Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876212106398516:2475], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2024-11-21T23:19:27.199269Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzJkNjIwMzYtZjMwMGI3NWEtNjQ3MzhkMTQtY2ViNTI1NzA=, ActorId: [2:7439876212106398409:2463], ActorState: ExecuteState, TraceId: 01jd8gbz68bkeye3s5w079tbwy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> TColumnShardTestSchema::InternalTTL_Types >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryExplain >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::QueryCancelWrite >> KqpQuery::YqlTableSample [GOOD] >> KqpStats::DataQueryMulti >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::DeleteWhereInSubquery >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/dl5p/001029/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 4523, MsgBus: 6622 2024-11-21T23:18:23.845638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875937851433062:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:23.845767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001029/r3tmp/tmpmksbTX/pdisk_1.dat 2024-11-21T23:18:24.508769Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:24.516999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:24.517077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:24.524348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4523, node 1 2024-11-21T23:18:24.827467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:24.827493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:24.827513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:24.827605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6622 TClient is connected to server localhost:6622 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:25.754617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.808621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.987729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:26.289812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:26.386046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:28.916900Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875937851433062:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:29.131402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875959326271123:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:29.131578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:29.131898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:29.183957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:29.268059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:29.350348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:29.440737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:29.486937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:29.519547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:29.598659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875963621238927:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:29.604266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:29.607062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875963621238933:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:29.611498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:29.622759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875963621238936:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:39.482579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:39.482607Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"1fdcf6e4-ef05f38e-42c5c5da-5e029c4f") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"948bdda2-dcf8dbde-607ef5d7-93ccb776") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"2129337-adaec718-8f516542-6e7a477a")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2024-11-21T23:19:29.781539Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976710971. Error: [TEvError] Spilling Service not started 2024-11-21T23:19:29.806168Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439876221319282519:4425], TxId: 281474976710971, task: 2. Ctx: { TraceId : 01jd8gc08253xec2y4qqrjxwgt. SessionId : ydb://session/3?node_id=1&id=Mzg5MmRhMzQtMTQ4N2E0YTgtYmYzNmQxNDktZGM4Yjc3MTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: [Compute spilling][TEvError] Spilling Service not started }. 2024-11-21T23:19:29.822665Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439876221319282518:4424], TxId: 281474976710971, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd8gc08253xec2y4qqrjxwgt. SessionId : ydb://session/3?node_id=1&id=Mzg5MmRhMzQtMTQ4N2E0YTgtYmYzNmQxNDktZGM4Yjc3MTI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T23:19:29.823248Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439876221319282520:4426], TxId: 281474976710971, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jd8gc08253xec2y4qqrjxwgt. SessionId : ydb://session/3?node_id=1&id=Mzg5MmRhMzQtMTQ4N2E0YTgtYmYzNmQxNDktZGM4Yjc3MTI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2024-11-21T23:19:29.875662Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mzg5MmRhMzQtMTQ4N2E0YTgtYmYzNmQxNDktZGM4Yjc3MTI=, ActorId: [1:7439876217024315194:4417], ActorState: ExecuteState, TraceId: 01jd8gc08253xec2y4qqrjxwgt, Create QueryResponse for error on request, msg: >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=132231694.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231694.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231694.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231694.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231694.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231694.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231694.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230494.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112231694.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112231694.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230494.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112230494.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112230494.000000s;Name=;Codec=}; 2024-11-21T23:18:15.159216Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:18:15.252211Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:18:15.274017Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:18:15.274100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:18:15.274338Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:18:15.282797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:18:15.283033Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:18:15.283292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:18:15.283415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:18:15.283529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:18:15.283634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:18:15.283756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:18:15.283880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:18:15.283988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:18:15.284102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:18:15.284201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:18:15.284302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:18:15.309230Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:18:15.309305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:18:15.314407Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:18:15.314677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:18:15.314731Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
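Editor's note: the EnableColdTiersAfterTtl output in this block revolves around the per-column eviction settings printed at its start (Tiers={{Column=timestamp;EvictAfter=...;Name=tier0;...}} ... TTL={Column=timestamp;...}). The test applies them through its own helpers; as a rough YQL sketch, a plain column TTL declaration looks like the statement below (the table path and interval are assumptions, and the tier-to-external-storage part of the test setup is not reproduced here).

    -- Assumed column table with a Timestamp column named `timestamp`.
    ALTER TABLE `/Root/olapTable` SET (TTL = Interval("PT1H") ON timestamp);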
2024-11-21T23:18:15.314903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:15.315060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:18:15.315128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:18:15.315170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:18:15.315293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:18:15.315365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:18:15.315412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:18:15.315471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:18:15.315667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:15.315732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:18:15.315772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:18:15.315804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:18:15.315901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:18:15.315954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:18:15.315996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:18:15.316024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:18:15.316096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:18:15.316132Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:18:15.316160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:18:15.316208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:18:15.316246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:18:15.316273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:18:15.316440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2024-11-21T23:18:15.316524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2024-11-21T23:18:15.316618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2024-11-21T23:18:15.316698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2024-11-21T23:18:15.316860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:18:15.316944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:18:15.316985Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSc ... 
(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:19:31.895656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:917:2917] finished for tablet 9437184 2024-11-21T23:19:31.895728Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:917:2917] send ScanData to [1:916:2916] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:19:31.896129Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:917:2917] and sent to [1:916:2916] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.241},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.244}],"full":{"a":1732231171651278,"name":"_full_task","f":1732231171651278,"d_finished":0,"c":0,"l":1732231171895767,"d":244489},"events":[{"name":"bootstrap","f":1732231171651490,"d_finished":5387,"c":1,"l":1732231171656877,"d":5387},{"a":1732231171895452,"name":"ack","f":1732231171892441,"d_finished":2260,"c":3,"l":1732231171895387,"d":2575},{"a":1732231171895444,"name":"processing","f":1732231171658082,"d_finished":158672,"c":24,"l":1732231171895389,"d":158995},{"name":"ProduceResults","f":1732231171653613,"d_finished":5690,"c":29,"l":1732231171895641,"d":5690},{"a":1732231171895643,"name":"Finish","f":1732231171895643,"d_finished":0,"c":0,"l":1732231171895767,"d":124},{"name":"task_result","f":1732231171658101,"d_finished":156106,"c":21,"l":1732231171892294,"d":156106}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:917:2917]->[1:916:2916] 2024-11-21T23:19:31.896204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:19:31.650853Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T23:19:31.896235Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:19:31.896380Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.078777s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.073240s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::LAST_PK;duration=0.036704s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 
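Editor's note: the scan profile above (FETCHING_COLUMNS with program_input=(column_ids=1;column_names=timestamp)) is a single-column projection read of the `timestamp` column, driven directly through TEvKqpScan in this test. A hedged YQL equivalent of such a read, with an assumed table path, would be simply:

    -- Assumed column table path; the test issues the scan without going through SQL.
    SELECT `timestamp` FROM `/Root/olapTable`;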
2024-11-21T23:19:31.896469Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:917:2917];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:19:31.898130Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T23:19:31.898319Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T23:19:31.898469Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T23:19:31.898615Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T23:19:31.898681Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T23:19:31.899109Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:924:2924];trace_detailed=; 2024-11-21T23:19:31.899448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T23:19:31.899678Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:19:31.899852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:19:31.899993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:19:31.900190Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:19:31.900270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:19:31.900390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:19:31.900448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:924:2924] finished for tablet 9437184 2024-11-21T23:19:31.900525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:924:2924] send ScanData to [1:923:2923] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:19:31.900920Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:924:2924] and sent to [1:923:2923] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1732231171899057,"name":"_full_task","f":1732231171899057,"d_finished":0,"c":0,"l":1732231171900573,"d":1516},"events":[{"name":"bootstrap","f":1732231171899207,"d_finished":819,"c":1,"l":1732231171900026,"d":819},{"a":1732231171900172,"name":"ack","f":1732231171900172,"d_finished":0,"c":0,"l":1732231171900573,"d":401},{"a":1732231171900159,"name":"processing","f":1732231171900159,"d_finished":0,"c":0,"l":1732231171900573,"d":414},{"name":"ProduceResults","f":1732231171899771,"d_finished":477,"c":2,"l":1732231171900433,"d":477},{"a":1732231171900435,"name":"Finish","f":1732231171900435,"d_finished":0,"c":0,"l":1732231171900573,"d":138}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:924:2924]->[1:923:2923] 2024-11-21T23:19:31.901006Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:19:31.898729Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T23:19:31.901052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:19:31.901095Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T23:19:31.901180Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:924:2924];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 160000/9495672 160000/9495672 160000/9495672 80000/4750028 0/0 >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |87.1%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2024-11-21T23:18:33.351318Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1732231113351275 2024-11-21T23:18:33.917339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875981400062792:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:33.917423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:34.011551Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875984074347860:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:34.212957Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:34.211977Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00132b/r3tmp/tmpjYl222/pdisk_1.dat 2024-11-21T23:18:34.255695Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:34.463402Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:34.463570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:34.470500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:34.470575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:34.475795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:34.479781Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:34.485873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4386, node 1 2024-11-21T23:18:34.529502Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:34.596591Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:34.596629Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:34.643168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/00132b/r3tmp/yandexvVqYCp.tmp 2024-11-21T23:18:34.643195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/00132b/r3tmp/yandexvVqYCp.tmp 2024-11-21T23:18:34.643524Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/00132b/r3tmp/yandexvVqYCp.tmp 2024-11-21T23:18:34.643638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:34.702676Z INFO: TTestServer started on Port 9417 GrpcPort 4386 TClient is connected to server localhost:9417 PQClient connected to localhost:4386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:35.168651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:18:38.024481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876001254217212:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.025155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876001254217179:2280], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.025240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.030056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876002874900247:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.030101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876002874900262:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.030141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:38.033285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2024-11-21T23:18:38.058225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876002874900283:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:38.058614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876001254217217:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2024-11-21T23:18:38.325119Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876002874900387:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:38.326012Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDM1YTBmNzEtNjc2YTViZWMtYzlkZjBiYTItYjJiNDhjNGQ=, ActorId: [1:7439876002874900243:2301], ActorState: ExecuteState, TraceId: 01jd8gaf6bf7zkwmn4kd8bzwfe, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:38.328270Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:38.326557Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876001254217252:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:38.327941Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGQyM2QzMC01MGIzOWVmOC1hMThjMTNjNS1jN2M4MWYzYg==, ActorId: [2:7439876001254217177:2279], ActorState: ExecuteState, TraceId: 01jd8gaf62a21fdsf93dr6fe7d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:38.330635Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:38.332676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:38.505381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:38.651010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:4386", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:18:38.916128Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875981400062792:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:38.916219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:38.990472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jd8gafy6dr62mapcc0arfjsm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU2ZTE1NjgtNTQ1YThjOGEtMTFlMTgwM2YtNzAwZTE3MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:18:39.011471Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875984074347860:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:39.011560Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === ... :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876094631202447:2470] (SourceId=src, PreferedPartition=(NULL)) Start idle 2024-11-21T23:19:00.198184Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2024-11-21T23:19:00.200469Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:00.200569Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2024-11-21T23:19:00.200537Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439876098926169802:2470], now have 1 active actors on pipe 2024-11-21T23:19:00.200773Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:19:00.200806Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:19:00.200940Z node 4 :PERSQUEUE INFO: new Cookie src|152f5ce9-71e25a1-4e65d895-88887c9a_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2024-11-21T23:19:00.201049Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:19:00.201138Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:19:00.201482Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:19:00.201506Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:19:00.201590Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:19:00.201814Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|152f5ce9-71e25a1-4e65d895-88887c9a_0 2024-11-21T23:19:00.202564Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732231140202 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:00.202680Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|152f5ce9-71e25a1-4e65d895-88887c9a_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2024-11-21T23:19:00.202830Z :INFO: [] MessageGroupId [src] SessionId [src|152f5ce9-71e25a1-4e65d895-88887c9a_0] Write session: close. 
Timeout = 0 ms 2024-11-21T23:19:00.202873Z :INFO: [] MessageGroupId [src] SessionId [src|152f5ce9-71e25a1-4e65d895-88887c9a_0] Write session will now close 2024-11-21T23:19:00.202908Z :DEBUG: [] MessageGroupId [src] SessionId [src|152f5ce9-71e25a1-4e65d895-88887c9a_0] Write session: aborting 2024-11-21T23:19:00.203695Z :INFO: [] MessageGroupId [src] SessionId [src|152f5ce9-71e25a1-4e65d895-88887c9a_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:19:00.203727Z :DEBUG: [] MessageGroupId [src] SessionId [src|152f5ce9-71e25a1-4e65d895-88887c9a_0] Write session is aborting and will not restart 2024-11-21T23:19:00.203781Z :DEBUG: [] MessageGroupId [src] SessionId [src|152f5ce9-71e25a1-4e65d895-88887c9a_0] Write session: destroy 2024-11-21T23:19:00.203968Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|152f5ce9-71e25a1-4e65d895-88887c9a_0 grpc read done: success: 0 data: 2024-11-21T23:19:00.203990Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|152f5ce9-71e25a1-4e65d895-88887c9a_0 grpc read failed 2024-11-21T23:19:00.204012Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|152f5ce9-71e25a1-4e65d895-88887c9a_0 grpc closed 2024-11-21T23:19:00.204036Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|152f5ce9-71e25a1-4e65d895-88887c9a_0 is DEAD 2024-11-21T23:19:00.205129Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:19:00.205399Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:00.205448Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876098926169802:2470] destroyed 2024-11-21T23:19:00.205473Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2024-11-21T23:19:00.276665Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T23:19:02.278622Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T23:19:03.668454Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T23:19:04.282755Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === In the next federation discovery response dc2 will be available 2024-11-21T23:19:04.719057Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:19:04.719089Z node 3 :IMPORT WARN: Table profiles were not loaded Test retry state: get retry delay 2024-11-21T23:19:06.283701Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T23:19:08.284666Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T23:19:08.673667Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T23:19:10.285712Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T23:19:12.290642Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T23:19:13.670899Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T23:19:14.298555Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T23:19:16.302999Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T23:19:18.314625Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T23:19:18.671453Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T23:19:20.318668Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T23:19:22.334637Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T23:19:23.672964Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 Test retry state: get retry delay 2024-11-21T23:19:24.341529Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T23:19:26.358112Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2024-11-21T23:19:28.358978Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2024-11-21T23:19:28.650742Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2024-11-21T23:19:28.650819Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2024-11-21T23:19:28.655819Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2024-11-21T23:19:28.656127Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2024-11-21T23:19:28.675222Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:19:28.663123Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2024-11-21T23:19:28.663969Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 === Waiting for repair >>> Ready to answer: ok 2024-11-21T23:19:30.366002Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:24125" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:24125" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:24125" location: "dc3" status: AVAILABLE weight: 500 } ] } === Closing the session 2024-11-21T23:19:30.391272Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: try to update token 2024-11-21T23:19:30.391778Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2024-11-21T23:19:30.397747Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2024-11-21T23:19:30.397790Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session will now close 2024-11-21T23:19:30.397848Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: aborting 2024-11-21T23:19:30.398329Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session is aborting and will not restart 2024-11-21T23:19:30.413195Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2024-11-21T23:19:30.413259Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: destroy 2024-11-21T23:19:31.250230Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439876232070157604:2732] TxId: 281474976715733. Ctx: { TraceId: 01jd8gc2mh7hrrezytsacjd8pg, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTljYzc3YjItYmFlMjA4MjQtZDk1NzQ4NjQtZDJkMDU2YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2024-11-21T23:19:31.267171Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876232070157612:2741], TxId: 281474976715733, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTljYzc3YjItYmFlMjA4MjQtZDk1NzQ4NjQtZDJkMDU2YjU=. CustomerSuppliedId : . TraceId : 01jd8gc2mh7hrrezytsacjd8pg. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439876232070157604:2732], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2024-11-21T23:19:31.269023Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876232070157613:2742], TxId: 281474976715733, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=NTljYzc3YjItYmFlMjA4MjQtZDk1NzQ4NjQtZDJkMDU2YjU=. TraceId : 01jd8gc2mh7hrrezytsacjd8pg. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439876232070157604:2732], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 3259, MsgBus: 18607 2024-11-21T23:19:07.282612Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876126977052482:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:07.282671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028cd/r3tmp/tmpA95wCp/pdisk_1.dat 2024-11-21T23:19:07.663854Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:07.671995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:07.672086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:07.677079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3259, node 1 2024-11-21T23:19:07.830890Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:07.830910Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:07.830916Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:07.831004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18607 TClient is connected to server localhost:18607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:08.561545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:08.595307Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:08.600090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:08.808856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:08.998213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:09.110714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.035319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876139861955859:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:11.046933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:11.080699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.118477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.152601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.192716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.217658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.257354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.308771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876144156923650:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:11.308840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:11.309050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876144156923655:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:11.312869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:11.324173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876144156923657:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:12.282729Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876126977052482:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:12.282825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:13.402032Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231153359, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 28298, MsgBus: 21233 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028cd/r3tmp/tmpiFeu6x/pdisk_1.dat 2024-11-21T23:19:14.602616Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:14.646264Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:14.700228Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:14.700320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:14.708497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28298, node 2 2024-11-21T23:19:14.877932Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:14.877953Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:14.877961Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:14.878061Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21233 TClient is connected to server localhost:21233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:15.915532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:15.923316Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:15.941264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:16.088730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:16.361570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:16.520231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:19.784341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876178362413107:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:19.784435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:19.829046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:19.878213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:19.954264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.035304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.114775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.178300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.310917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876182657380902:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:20.311033Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:20.311565Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876182657380907:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:20.316902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:20.350583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876182657380909:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:21.667164Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231161668, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 13856, MsgBus: 16858 2024-11-21T23:19:22.972356Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876192539954547:2197];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028cd/r3tmp/tmpul41vH/pdisk_1.dat 2024-11-21T23:19:23.150674Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:23.203035Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:23.217544Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:23.217628Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:23.219357Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13856, node 3 2024-11-21T23:19:23.335486Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:23.335505Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:23.335529Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:23.335670Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16858 TClient is connected to server localhost:16858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:24.113283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:24.123130Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:24.141226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:24.231107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:24.634218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:24.754021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:27.950508Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876192539954547:2197];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:27.967213Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:29.166928Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876222604727174:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.167315Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.219195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.328680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.429624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.518705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.585097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.677557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.796304Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876222604727687:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.796374Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.796591Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876222604727692:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.800305Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:29.815456Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876222604727694:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpStats::RequestUnitForExecute [GOOD] >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/dl5p/00103c/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 19743, MsgBus: 26187 2024-11-21T23:18:22.378774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875932674983586:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:22.378827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00103c/r3tmp/tmpZqW02c/pdisk_1.dat 2024-11-21T23:18:23.494237Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:23.524944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:23.531454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:23.531566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:23.555014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19743, node 1 2024-11-21T23:18:23.846942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:23.846965Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:23.846971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:23.847059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26187 TClient is connected to server localhost:26187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:24.852110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:24.871319Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:24.881999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.036968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.386549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.479072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:27.408819Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875932674983586:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:27.409063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:27.448566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875954149821556:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.448696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.715740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.751895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.794521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.828080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.906738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:27.988936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.050427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875958444789353:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.050493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.050544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875958444789358:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.054219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:28.064495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875958444789360:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:38.462475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:38.462508Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"f3c4a075-54a01ec3-160248ad-a5bb082d") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"76335d9f-faa07735-4d9d5e45-fe4acc8f") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"fb6d67a6-2b6c5bad-2807673f-a63d0e1c")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::QueryStats >> TColumnShardTestSchema::HotTiersRevCompression >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::ReadTableRangesFullScan |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |87.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> TColumnShardTestSchema::ColdCompactionSmoke >> KqpExplain::CompoundKeyRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 19510, MsgBus: 14799 2024-11-21T23:19:06.691050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876122794077507:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:06.691079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d4/r3tmp/tmpza9Zid/pdisk_1.dat 2024-11-21T23:19:07.149382Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19510, node 1 2024-11-21T23:19:07.183117Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:07.183264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:07.185811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:07.344804Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:07.344838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:07.344846Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:07.344985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14799 TClient is connected to server localhost:14799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:08.222760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:08.251331Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:08.263850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:08.513547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:08.822481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:08.991857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:11.743187Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876122794077507:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:11.743277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:12.031527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876144268915684:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.031770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.220556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.271066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.347675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.418891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.487071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.554218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.673410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876148563883486:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.673472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.673738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876148563883491:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.677810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:12.707192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876148563883493:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:14.512135Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876157153818435:2471], status: GENERIC_ERROR, issues:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2024-11-21T23:19:14.513566Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTNjZmQ4ZGMtZWUzM2U5ZDQtYzk5Y2JlNi1iY2IyZWQ2ZQ==, ActorId: [1:7439876157153818427:2466], ActorState: ExecuteState, TraceId: 01jd8gbjt1d3ma4zeggrajaw57, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... Trying to start YDB, gRPC: 1909, MsgBus: 4955 2024-11-21T23:19:15.825320Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876162530653783:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d4/r3tmp/tmpAHoqLb/pdisk_1.dat 2024-11-21T23:19:15.957528Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:16.168162Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:16.228381Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:16.228482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:16.235652Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1909, node 2 2024-11-21T23:19:16.552091Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:16.552118Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:16.552133Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:16.552242Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4955 TClient is connected to server localhost:4955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:18.116489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:18.124372Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:18.133496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:18.231451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:18.506482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:18.648391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:20.738947Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876162530653783:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:20.739034Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:21.862866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876188300459139:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.862977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.935134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.020629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.099174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.212389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.269595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.383568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.642547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876192595426946:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:22.642627Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:22.646257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876192595426951:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:22.651442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:22.679491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876192595426953:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 18948, MsgBus: 26343 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d4/r3tmp/tmppKJSlq/pdisk_1.dat 2024-11-21T23:19:26.797551Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:26.873222Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:26.952869Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:26.952968Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:26.959688Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18948, node 3 2024-11-21T23:19:27.184455Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:27.184483Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:27.184493Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:27.184606Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26343 TClient is connected to server localhost:26343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:28.756378Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:28.773675Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:28.786318Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:28.902687Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:29.222929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:29.335066Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:32.197257Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876236974416340:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.197404Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.292746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.378307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.462926Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.563287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.645559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.709220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.833179Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876236974416839:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.833275Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.833712Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876236974416844:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.839047Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:32.869720Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:32.870629Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876236974416846:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Consumed units: 340 Consumed units: 6 >> KqpStats::JoinStatsBasicScan [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/dl5p/00102e/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 63912, MsgBus: 13246 2024-11-21T23:18:22.798000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875933084919632:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:22.813737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00102e/r3tmp/tmpGH8rlP/pdisk_1.dat 2024-11-21T23:18:23.530627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:23.530743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:23.531920Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:23.546012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63912, node 1 2024-11-21T23:18:23.839739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:23.839769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:23.839797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:23.839902Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13246 TClient is connected to server localhost:13246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:24.853953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:24.883183Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:18:24.899712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.045251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.241789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:25.345157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:27.602265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875954559757687:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.602368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:27.774121Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875933084919632:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:27.774209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:27.953735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.042886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.134644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.183707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.233307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.328237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:28.439362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875958854725495:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.439466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.444095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875958854725500:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:28.449255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:28.466214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875958854725502:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:18:38.531196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:38.531241Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '763) '('"_id" '"86e40d2b-a41dfd7b-8164926b-424a1d6a") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '661) '('"_id" '"d0125956-5d642c25-845ab154-4b9f546") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '673) '('"_id" '"9aa7cfde-99a04324-666356ff-ba37d6e2")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> TColumnShardTestSchema::HotTiersWithStat >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile |87.1%| [TA] $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQuery::QueryCancelWrite [GOOD] >> KqpParams::Decimal+QueryService [GOOD] >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpStats::RequestUnitForBadRequestExecute >> KqpQueryPerf::IndexUpsert+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange [GOOD] Test command err: Trying to start YDB, gRPC: 18773, MsgBus: 24602 2024-11-21T23:19:08.634843Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876131541430647:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:08.634894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028ba/r3tmp/tmpgDnX03/pdisk_1.dat 2024-11-21T23:19:09.337220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:09.337311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:09.351453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:09.414515Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18773, node 1 2024-11-21T23:19:09.614491Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:09.614515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:09.614525Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:09.614613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24602 TClient is connected to server localhost:24602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:10.623282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:10.650347Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:10.670994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.937714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:11.277271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:11.352980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:13.636623Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876131541430647:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:13.636686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:13.768458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876153016268771:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.768564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.071395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.160869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.233451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.285066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.319187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.378896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.480243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876157311236574:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.480330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.483421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876157311236580:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.490557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:14.517368Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:14.517617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876157311236582:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin 
(MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 25830, MsgBus: 24309 2024-11-21T23:19:18.819506Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:5 ... Table, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.458226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.585378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876202591237171:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:24.585457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:24.585589Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876202591237176:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:24.590687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:24.616734Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:24.617087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876202591237178:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:26.214093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:26.718806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.808266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1132, MsgBus: 24682 2024-11-21T23:19:29.854865Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876223108551097:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:29.854916Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028ba/r3tmp/tmplLdy9z/pdisk_1.dat 2024-11-21T23:19:30.024058Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:30.027537Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:30.027700Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:30.118750Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1132, node 3 2024-11-21T23:19:30.299162Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:30.299189Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:30.299201Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:30.299348Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24682 TClient is connected to server localhost:24682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:31.022727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:31.029688Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:31.043982Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:31.140624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:31.391822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:31.480162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:34.375842Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876244583389286:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.375973Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.436870Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.518330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.568191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.616913Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.690855Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.785092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.858455Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876223108551097:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:34.858512Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:34.874824Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876244583389786:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.874999Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.875475Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876244583389791:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.881433Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:34.899483Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876244583389793:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"lookup_by":["App (new_app_1)","Ts (49)"],"columns":["App","Host","Message","Ts"],"scan_by":["Host (null, xyz)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 10352, MsgBus: 11064 2024-11-21T23:19:06.051178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876124736327058:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:06.051222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d6/r3tmp/tmpWoUgbG/pdisk_1.dat 2024-11-21T23:19:06.645259Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:06.671685Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:06.671776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:06.674029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10352, node 1 2024-11-21T23:19:06.838969Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:06.838997Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:06.839012Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:06.839100Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11064 TClient is connected to server localhost:11064 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:07.558836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.594785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:07.603409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:07.839066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:08.128526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:08.211062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.501334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876141916197954:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.501459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:10.902254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.003455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.057220Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876124736327058:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:11.058554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:11.075960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.139859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.182990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.267667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:11.355631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876146211165759:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:11.355711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:11.357406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876146211165764:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:11.362107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:11.380104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876146211165766:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 6922, MsgBus: 15298 2024-11-21T23:19:15.046778Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876160449718573:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:15.046828Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d6/r3tmp/tmpLZ6uwZ/pdisk_1.dat 2024-11-21T23:19:15.295681Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6922, node 2 2024-11-21T23:19:15.379880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:15.379974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:15.431767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:15.558978Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:15.559001Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:15.559009Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:15.559115Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15298 TClient is connected to server localhost:15298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:17.285702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:17.321678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:17.452006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:17.800789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:18.028386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:20.049992Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876160449718573:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:20.050039Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:21.178716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876186219524047:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.178802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.200895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.267311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.312646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.351946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.389538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.472195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.546563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876186219524543:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.546692Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.550965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876186219524548:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.555344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:21.587089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876186219524550:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 62700, MsgBus: 7219 2024-11-21T23:19:25.058147Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876206070811859:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:25.058196Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028d6/r3tmp/tmpJHNx45/pdisk_1.dat 2024-11-21T23:19:25.270167Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:25.270281Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:25.274333Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62700, node 3 2024-11-21T23:19:25.642756Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:25.646914Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:19:25.647101Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:19:25.675608Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:25.675636Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:25.675649Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:25.675769Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7219 TClient is connected to server localhost:7219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:27.487646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:27.518673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:27.629407Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:27.946594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:28.123893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:30.062759Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876206070811859:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:30.062812Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:31.855467Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876231840617221:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.855692Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.890749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:31.954237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.013363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.182170Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.286157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.385091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.515698Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876236135585027:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.515797Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.516109Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876236135585032:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.520553Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:32.539584Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876236135585034:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:36.514154Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231175542, txId: 281474976710671] shutting down >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] >> KqpLimits::DatashardProgramSize >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> KqpStats::DataQueryMulti [GOOD] >> KqpQuery::QueryExplain [GOOD] >> KqpQuery::QueryFromSqs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2024-11-21T23:19:35.669329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:19:35.669402Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpQuery::TryToUpdateNonExistentColumn >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> KqpLimits::BigParameter >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWrite [GOOD] Test command err: Trying to start YDB, gRPC: 8840, MsgBus: 29591 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028bf/r3tmp/tmpx8UIMV/pdisk_1.dat 2024-11-21T23:19:08.538506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:08.736871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:08.736976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:08.740254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:08.757092Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8840, node 1 2024-11-21T23:19:08.915706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:08.915726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:08.915732Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:08.915824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29591 TClient is connected to server localhost:29591 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:09.873625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.904276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.035067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.227356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.307741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:12.723171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876147402860239:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.736802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.772385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.814764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.904764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.956995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.025848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.254892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.327444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876151697828043:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.327554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.334569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876151697828048:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.339854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:13.375446Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:13.375931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876151697828050:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 3095, MsgBus: 4112 2024-11-21T23:19:20.497528Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876183692262251:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:20.497645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028bf/r3tmp/tmpFgXH5J/pdisk_1.dat 2024-11-21T23:19:20.783909Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:20.803617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:20.803732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:20.804953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3095, node 2 2024-11-21T23:19:20.878961Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:20.878984Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:20.878992Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:20.879110Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4112 TClient is connected to server localhost:4112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:19:21.545439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.587078Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:21.608897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:21.707536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:21.902586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:22.003902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.201365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876205167100203:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.201446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.223974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opI ... 9:30.229701Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjNiNTUyYzAtZDZmMGIyMDQtYjJiOWFkOWEtNjIxNzNlNjk=, ActorId: [2:7439876226641938051:2603], ActorState: ExecuteState, TraceId: 01jd8gc24p9xx69jcf4ptmr2vg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:19:30.325010Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439876226641938078:3893], for# user0@builtin, access# DescribeSchema 2024-11-21T23:19:30.325039Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439876226641938078:3893], for# user0@builtin, access# DescribeSchema 2024-11-21T23:19:30.327896Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876226641938074:2614], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:19:30.329708Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Zjc5NmExZGItMmUyZDM1MzQtNTFlMmNlYmQtYjI3N2NiNjc=, ActorId: [2:7439876226641938070:2612], ActorState: ExecuteState, TraceId: 01jd8gc27rbq89ae1r3zcw782b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:19:30.360791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710695:0, at schemeshard: 72057594046644480 2024-11-21T23:19:30.488176Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439876226641938106:3905], for# user0@builtin, access# DescribeSchema 2024-11-21T23:19:30.488208Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439876226641938106:3905], for# user0@builtin, access# DescribeSchema 2024-11-21T23:19:30.493398Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876226641938103:2624], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:19:30.495215Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmVmZjVhNTktZTgyY2ZiN2MtNmY2ZDhjM2YtNzE5YjNjZTg=, ActorId: [2:7439876226641938098:2622], ActorState: ExecuteState, TraceId: 01jd8gc2cn55j74wxrf3txtn74, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:19:30.634879Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439876226641938127:3912], for# user0@builtin, access# DescribeSchema 2024-11-21T23:19:30.634924Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439876226641938127:3912], for# user0@builtin, access# DescribeSchema 2024-11-21T23:19:30.637882Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876226641938123:2633], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:19:30.640465Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTg5MTEzZDQtODM5ZTgxNDUtYzg2YmJkYTItMmI1NzBhOTQ=, ActorId: [2:7439876226641938118:2631], ActorState: ExecuteState, TraceId: 01jd8gc2h89bns8c5q9mh2fkhv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:19:30.697655Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439876226641938148:3918], for# user0@builtin, access# DescribeSchema 2024-11-21T23:19:30.697686Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:7439876226641938148:3918], for# user0@builtin, access# DescribeSchema 2024-11-21T23:19:30.700493Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876226641938142:2642], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:19:30.702735Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODk4YWE4NmQtNzZkZDk0NmEtMjYyNjNjOTMtNTNiNmEyMDQ=, ActorId: [2:7439876226641938138:2640], ActorState: ExecuteState, TraceId: 01jd8gc2kv882zt2ew7w2k7bxf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 11296, MsgBus: 22973 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028bf/r3tmp/tmpkPPjai/pdisk_1.dat 2024-11-21T23:19:32.034632Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:32.041244Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:32.065841Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:32.065932Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:32.068700Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11296, node 3 2024-11-21T23:19:32.230981Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:32.231005Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:32.231013Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:32.231121Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22973 TClient is connected to server localhost:22973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:32.836915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:32.845734Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:32.857292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:32.955905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.174975Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.280260Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:35.703548Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876247306149173:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:35.703643Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:35.757236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:35.811170Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:35.865425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:35.945789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.015632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.102780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.196133Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876251601116967:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.196245Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.196296Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876251601116972:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.200296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:36.221743Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876251601116974:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 26724, MsgBus: 27463 2024-11-21T23:19:24.873100Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876202563986137:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:24.873142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00284f/r3tmp/tmpPsirfW/pdisk_1.dat 2024-11-21T23:19:25.924067Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:25.931001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:25.931095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:25.941603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:25.949988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26724, node 1 2024-11-21T23:19:26.193957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:26.193979Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:26.193990Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:26.194078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27463 TClient is connected to server localhost:27463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:27.679400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:27.719370Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:27.737104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:27.991247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:28.402203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:28.575433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:29.883356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876202563986137:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:29.883433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:31.595131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876232628758936:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.595250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.954605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.002735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.044428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.128682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.189699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.297356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:32.414902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876236923726737:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.414996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.415403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876236923726742:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:32.419533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:32.455607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876236923726744:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:34.503732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.556012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.622664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpLimits::ManyPartitionsSortingLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28642, MsgBus: 25992 2024-11-21T23:19:08.193418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876131824628719:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:08.193711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028c0/r3tmp/tmppqiH6b/pdisk_1.dat 2024-11-21T23:19:08.600091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:08.600186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:08.603109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28642, node 1 2024-11-21T23:19:08.671712Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:08.683347Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:19:08.684434Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:19:08.780323Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:08.780347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:08.780356Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:08.780429Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25992 TClient is connected to server localhost:25992 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:09.480237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.507461Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:09.518646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.689052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.900167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.985030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:12.735768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876149004499425:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.735928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.833699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.924437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:12.991934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.065642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.225133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876131824628719:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:13.225185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:13.252129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.325929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.427579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876153299467234:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.427634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.427962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876153299467239:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.431422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:13.440009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876153299467242:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 16190, MsgBus: 29331 2024-11-21T23:19:16.292312Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876165513454328:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:16.292346Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028c0/r3tmp/tmpk7jpCo/pdisk_1.dat 2024-11-21T23:19:16.600709Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:16.651583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:16.651669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:16.659599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16190, node 2 2024-11-21T23:19:16.829015Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:16.829045Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:16.829053Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:16.829132Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29331 TClient is connected to server localhost:29331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:17.974988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:17.988967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:18.066540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:18.310578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:18.415906Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:21.292496Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor ... Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:22.468387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:22.493962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876191283260192:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 25313, MsgBus: 21599 2024-11-21T23:19:26.086905Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876210849988814:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:26.100956Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028c0/r3tmp/tmpjrMSmE/pdisk_1.dat 2024-11-21T23:19:26.506359Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:26.514040Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:26.514147Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:26.524660Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25313, node 3 2024-11-21T23:19:26.775712Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:26.775742Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:26.775752Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:26.775885Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21599 TClient is connected to server localhost:21599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:28.308741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:28.317455Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:28.337934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:28.611998Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:28.972756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:29.395091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:31.066070Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876210849988814:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:31.066142Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:33.646591Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876240914761600:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:33.646715Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:33.730155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:33.874898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:33.965536Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.065357Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.178641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.385598Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.504462Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876245209729421:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.504563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.505065Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876245209729426:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.510231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:34.548392Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876245209729428:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:36.396628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.172842Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876258094631878:2510], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2024-11-21T23:19:37.173163Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDMzYTdmMGItYTRhOTJmMjYtZDJlODY5Ni02ODMwNDM1Ng==, ActorId: [3:7439876258094631876:2509], ActorState: ExecuteState, TraceId: 01jd8gc8xp4mhx6a92rtaz6qnk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T23:19:37.288765Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTE0Y2ViNTUtMjhkZjhmNWItZjAwNGI0Yi04M2E4YmU3ZA==, ActorId: [3:7439876258094631900:2512], ActorState: ExecuteState, TraceId: 01jd8gc8z93hdy8q9j03vykgzm, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2024-11-21T23:19:37.337800Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876258094631917:2518], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T23:19:37.341636Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjcxMjQ0MDItMmQxNzVlOTEtZDkwMTAzMTMtNjU2NzdlZDc=, ActorId: [3:7439876258094631915:2517], ActorState: ExecuteState, TraceId: 01jd8gc92k2mav4g045zvdbf1d, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2024-11-21T23:19:37.391745Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876258094631949:2524], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2024-11-21T23:19:37.392421Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2U5Y2RkMGQtZjBhZDczYTctZjcyM2QwMDAtYjM5OGU5OTY=, ActorId: [3:7439876258094631947:2523], ActorState: ExecuteState, TraceId: 01jd8gc94sacfq5f84qr78nmsy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD] Test command err: Trying to start YDB, gRPC: 64355, MsgBus: 4364 2024-11-21T23:19:15.053197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876161830858396:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:15.053468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028b3/r3tmp/tmpHVYFPA/pdisk_1.dat 2024-11-21T23:19:15.408289Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:15.428775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:15.428853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:15.431777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64355, node 1 2024-11-21T23:19:15.630958Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:15.630980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:15.630986Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:15.631064Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4364 TClient is connected to server localhost:4364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:16.636247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
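
For context on the decimal failures above: the compiler rejects a parameter declared with precision 99 ("Invalid decimal precision: 99") and refuses to narrow Decimal(35,10) values into the Decimal(22,9) column 'Value22' ("Failed to convert 'Value22': Decimal(35,10) to Optional"). A minimal YQL sketch of the kind of statement that produces these diagnostics follows; the table path and exact query text are assumptions, only the column names Key/Value22/Value35 and the decimal types come from the log.

-- Hypothetical sketch (not the test's actual query text).
-- Writing a Decimal(35,10) value into the Decimal(22,9) column 'Value22'
-- fails type conversion at compile time, matching the issues above.
DECLARE $value22 AS Decimal(35,10);
DECLARE $value35 AS Decimal(35,10);
UPSERT INTO `/Root/DecimalTest`   -- assumed table path
    (Key, Value22, Value35)
VALUES (1, $value22, $value35);
-- Declaring a parameter as Decimal(99,9) is rejected even earlier
-- ("Invalid decimal precision: 99"), since precision 99 is outside the accepted range.
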
2024-11-21T23:19:16.669816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:17.009920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:17.494622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:17.666263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:20.062834Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876161830858396:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:20.120679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:20.257383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876183305696444:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:20.257530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:20.530109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.602258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.642289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.738664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.835816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:20.912913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.016800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876187600664254:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.016918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.017287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876187600664259:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:21.021854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:21.043649Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T23:19:21.043944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876187600664261:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:22.767150Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876191895631963:2475], status: GENERIC_ERROR, issues:
:3:26: Error: Unexpected token '[' : cannot match to any predicted input...
:3:32: Error: Unexpected token '/' : cannot match to any predicted input... 2024-11-21T23:19:22.768449Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGY5ZGJmYmQtYWYxNDEzMjgtZTU5ZGRkNGEtZTZmZGYzY2U=, ActorId: [1:7439876191895631875:2465], ActorState: ExecuteState, TraceId: 01jd8gbtw04fm6ybmztad6synh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:3:26: Error: Unexpected token '[' : cannot match to any predicted input...
:3:32: Error: Unexpected token '/' : cannot match to any predicted input... Trying to start YDB, gRPC: 17852, MsgBus: 32169 2024-11-21T23:19:23.975425Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876198006954307:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:24.047288Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028b3/r3tmp/tmpeZVUS8/pdisk_1.dat 2024-11-21T23:19:24.281071Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:24.334267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:24.334361Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17852, node 2 2024-11-21T23:19:24.341394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:24.542975Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:24.542997Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:24.543006Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:24.543102Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32169 TClient is connected to server localhost:32169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:25.080519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:25.095120Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:25.104416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:25.199726Z node 2 :FLAT_TX_SCHE ... 
28.924386Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:28.927656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876219481792356:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:28.927737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.002775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.069416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.152423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.226267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.325605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.407663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.557962Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876223776760159:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.558058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.558459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876223776760164:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.563308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:29.594496Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:29.594787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876223776760166:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:31.234044Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876232366695108:2471], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2024-11-21T23:19:31.234583Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjgyNzNlMC1iMzQ3YzFhOS1kNWIzZDItZjU2YmEzODI=, ActorId: [2:7439876232366695098:2465], ActorState: ExecuteState, TraceId: 01jd8gc33c30pnfpbkrjyj1zhb, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: Trying to start YDB, gRPC: 9381, MsgBus: 20799 2024-11-21T23:19:32.197298Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876234004152646:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:32.197348Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028b3/r3tmp/tmpbOizI7/pdisk_1.dat 2024-11-21T23:19:32.305786Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:32.331179Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:32.331283Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:32.335858Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9381, node 3 2024-11-21T23:19:32.459104Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:32.459132Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:32.459141Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:32.459248Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20799 TClient is connected to server localhost:20799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:33.461157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
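
The GENERIC_ERROR and UNSUPPORTED compile failures above are parser/analyzer rejections: an unexpected '[' and '/' token at the reported positions, and an expression that would require ATOM evaluation (code 2030). A hedged illustration of the first class of error follows; it is an assumption about what such a query might look like (for example, a legacy bracket-style table reference), not the test's actual text.

-- Hypothetical sketch only: '[' is not a valid token at this position in YQL v1
-- syntax, so the parser reports "Unexpected token '[' : cannot match to any
-- predicted input...", similar to the issues logged above.
SELECT Key
FROM [/Root/SomeTable];            -- assumed legacy-style reference
-- The accepted form would use backticks instead:
-- SELECT Key FROM `/Root/SomeTable`;
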
2024-11-21T23:19:33.467993Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:33.475901Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.564365Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.778800Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.897162Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:37.004634Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876251184023523:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.004730Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.064971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.130587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.199208Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876234004152646:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:37.199263Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:37.213924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.243185Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.303047Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.388609Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.441260Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876255478991328:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.441366Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.441574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876255478991333:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.445107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:37.460741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876255478991335:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpQuery::GenericQueryNoRowsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] Test command err: Trying to start YDB, gRPC: 25274, MsgBus: 23461 2024-11-21T23:19:08.053302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876130902836934:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:08.058604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028c8/r3tmp/tmpPoxcNf/pdisk_1.dat 2024-11-21T23:19:08.758546Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:08.771939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:08.772017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:08.779360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25274, node 1 2024-11-21T23:19:08.979052Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:08.979074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:08.979080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:08.979207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23461 TClient is connected to server localhost:23461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:09.793751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:09.837208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.042423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
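
The recurring "Resource pool default not found or you don't have access permissions" warnings in these runs are emitted while the workload service lazily creates the default pool: TPoolFetcherActor fails with NOT_FOUND, an ESchemeOpCreateResourcePool suboperation is proposed, and TPoolCreatorActor schedules the "Transaction ... completed, doublechecking" retry until the pool becomes visible. For reference only, a pool can also be created explicitly in YQL; the sketch below is an assumption, and the setting names are not taken from this log.

-- Hypothetical sketch, not part of the test: explicit resource pool creation.
-- Setting names are assumptions; the supported list depends on the server version.
CREATE RESOURCE POOL test_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,   -- assumed setting name
    QUEUE_SIZE = 100               -- assumed setting name
);
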
2024-11-21T23:19:10.296390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:10.472704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:13.055571Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876130902836934:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:13.055624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:13.245732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876152377674923:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.245866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:13.596712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.679593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.736533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.789051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.875960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.949200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:14.045721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876156672642732:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.045789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.046087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876156672642737:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:14.048741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:14.058296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876156672642739:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 18147, MsgBus: 28799 2024-11-21T23:19:17.400633Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876169364247484:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:17.400682Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028c8/r3tmp/tmphRRrq1/pdisk_1.dat 2024-11-21T23:19:17.702380Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:17.725118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:17.725206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:17.729050Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18147, node 2 2024-11-21T23:19:17.906691Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:17.906724Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:17.906733Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:17.906850Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28799 TClient is connected to server localhost:28799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:18.875798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:18.890641Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:22.124330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876190839084447:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:22.125544Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:22.125640Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876190839084482:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:22.133284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:19:22.155295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:74398761908 ... 72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:36.789249Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:36.789288Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:36.789462Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:19:36.789485Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:19:36.789614Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:19:36.789634Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:19:36.789789Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:19:36.789808Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:19:36.789926Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:19:36.789943Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:19:36.790372Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:36.790436Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:36.790597Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:36.790656Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:36.790877Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:36.790910Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:36.791026Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:36.791055Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:36.791166Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:36.791202Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:36.791239Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:36.791265Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:36.791690Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:36.791729Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:36.791986Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:19:36.792021Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:19:36.792205Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:19:36.792238Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:19:36.792487Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
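
The TX_COLUMNSHARD normalizer chain above (Granules -> Chunks -> TablesCleaner -> CleanGranuleId -> CleanInsertionDedup -> GCCountersNormalizer -> RestorePortionFromChunks -> SyncPortionFromChunks -> SyncMinSnapshotFromChunks -> RestoreV1Chunks_V2 -> RestoreV2Chunks -> normalization_finished) runs on each column-shard tablet brought up for the OlapCreateAsSelect_Complex test. A minimal sketch of an OLAP create-as-select statement of the kind such a test exercises is shown below; the table paths, columns, and exact clause layout are assumptions, not the test's query.

-- Hypothetical sketch only: creating a column-store table from an existing
-- row-oriented table, which instantiates TX_COLUMNSHARD tablets like the ones
-- whose normalizers are logged above.
CREATE TABLE `/Root/ColumnDst` (
    PRIMARY KEY (Key)
)
WITH (STORE = COLUMN)
AS SELECT Key, Value FROM `/Root/RowSrc`;
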
2024-11-21T23:19:36.792521Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:19:36.792684Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:19:36.792707Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:19:36.805854Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:36.805904Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:36.805998Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:36.806019Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:36.806168Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:36.806187Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:36.806266Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:36.806314Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:36.806367Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:36.806594Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:36.806638Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:36.806660Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:36.806977Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:36.807006Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:36.807177Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:19:36.807198Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:19:36.807310Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:19:36.807347Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:19:36.807495Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:19:36.807516Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:19:36.807640Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:19:36.807657Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> TBlobStorageProxyTest::TestPersistence |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH21-StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 5120, MsgBus: 8474 2024-11-21T23:17:37.969440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875741935329176:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:37.969508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f5f/r3tmp/tmpG220eM/pdisk_1.dat 2024-11-21T23:17:38.716697Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:38.723360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:38.723487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:38.727993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5120, node 1 2024-11-21T23:17:38.852281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:38.852322Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:38.852341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:38.852439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8474 TClient is connected to server localhost:8474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:39.439725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.501163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.639956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.825073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:39.907870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:41.608533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875759115200053:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:41.608655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:41.922535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.980618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.026914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.057649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.102742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.135795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:42.187034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875763410167843:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.187112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.187486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875763410167848:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:42.193402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:42.238654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875763410167850:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:42.972814Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875741935329176:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:42.972866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:43.682576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.723420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.768005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.805191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:17:43.850567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.032105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.064922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.106023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.138624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.185790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.258384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.331235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.382158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:17:44.914009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T23:17:45.004122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:17:45.045435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T23:17:45.079652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:17:45.154660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T23:17:45.191684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T23:17:45.225708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part prop ... type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:19.495744Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T23:19:19.496144Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876177196820780:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:21.758860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.873695Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.968231Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.094856Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.193872Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.559269Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.677121Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.762819Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.856915Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.971054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.071113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.139291Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.210210Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.314022Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:2, at schemeshard: 72057594046644480 2024-11-21T23:19:24.465322Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.586748Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.681960Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.772361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.864345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.942565Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.025616Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.121916Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.225795Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715693:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.331957Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715694:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.568628Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.677040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.816540Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715697:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.081318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.203955Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715699:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.296897Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715700:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.437839Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715701:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.630780Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Cannot get console configs 2024-11-21T23:19:26.630929Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:26.657276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715702:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.767098Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715703:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.932983Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.115134Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715705:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.238722Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715706:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.381431Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.234612Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:1, at schemeshard: 72057594046644480 2024-11-21T23:19:28.401470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.508272Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.617394Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.755728Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.931624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.068773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715714:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.244636Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715715:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.396056Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.735669Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.888409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2024-11-21T23:19:30.042325Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2024-11-21T23:19:30.283349Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715720:0, at schemeshard: 72057594046644480 |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::UpdateWhereInSubquery >> KqpParams::MissingParameter >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQuery::QueryClientTimeout >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate >> KqpExplain::ReadTableRangesFullScan [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> KqpQuery::QueryStats [GOOD] >> KqpExplain::ExplainStream >> TColumnShardTestSchema::DropWriteRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 21296, MsgBus: 28707 2024-11-21T23:19:34.991963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876244701931158:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:34.992188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00284e/r3tmp/tmp8sanTG/pdisk_1.dat 2024-11-21T23:19:35.723719Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:35.768746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:35.768832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:35.775777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21296, node 1 2024-11-21T23:19:36.014479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:36.014498Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:36.014519Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:36.014599Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28707 TClient is connected to server localhost:28707 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:37.104114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:37.135404Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:37.160203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:37.352889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:37.673778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:37.786739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:39.805496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876266176769203:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:39.805616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:39.980668Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876244701931158:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:39.980735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:40.188909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.226972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.264355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.314996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.360710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.407920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.521376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876270471737001:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:40.521477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:40.521716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876270471737006:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:40.525982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:40.538426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876270471737008:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> KqpQuery::QueryCache >> KqpQuery::PreparedQueryInvalidate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRangesFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 6683, MsgBus: 61713 2024-11-21T23:19:18.233922Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876173024584785:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:18.234125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028ae/r3tmp/tmpO9NpF1/pdisk_1.dat 2024-11-21T23:19:18.965215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:18.965312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:18.965735Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:18.969601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6683, node 1 2024-11-21T23:19:19.262985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:19.263014Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:19.263022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:19.263144Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61713 TClient is connected to server localhost:61713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:20.128154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:20.166256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:20.519867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:20.876448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:20.963205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:23.222587Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876173024584785:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:23.305200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:23.367810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876194499422836:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:23.367903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:23.594373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.679329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.734593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.776345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.819542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.899478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:23.978582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876194499423334:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:23.978675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:23.982417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876194499423339:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:23.986310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:24.005338Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:24.006170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876194499423342:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.Text","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 17381, MsgBus: 25538 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028ae/r3tmp/tmpFVTAe4/pdisk_1.dat 2024-11-21T23:19:27.353099Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:27.358859Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:27.380107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:27.380198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:27.385005Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17381, node 2 2024-11-21T23:19:27.438959Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:27.438981Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:27.438989Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:27.439095Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25538 TClient is connected to server localhost:25538 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:27.887805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 wa ... ate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"GroupBy":"item.t1.Key","Aggregation":"state","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.t1.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":21,"Plans":[{"PlanNodeId":25,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"Foo.t1.Key = t1.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"SortBy":"row.Key","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":4},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 9050, MsgBus: 8265 2024-11-21T23:19:36.996080Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876251623438881:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:36.996822Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028ae/r3tmp/tmpiss7nG/pdisk_1.dat 2024-11-21T23:19:37.123559Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:37.153528Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:37.153641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9050, node 3 2024-11-21T23:19:37.155933Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:37.296974Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T23:19:37.297005Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:37.297015Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:37.297132Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8265 TClient is connected to server localhost:8265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:38.064070Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:38.095899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:38.184559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:38.357745Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:19:38.457292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.066756Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876273098277058:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:41.066872Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:41.117658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.155797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.194597Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.255455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.310312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.405989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.497018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876273098277562:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:41.497125Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:41.497526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876273098277567:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:41.500836Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:41.513040Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876273098277569:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:41.998839Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876251623438881:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:41.998914Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:42.856949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::DeleteImmediate >> KqpStats::RequestUnitForBadRequestExecute [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryStats [GOOD] Test command err: Trying to start YDB, gRPC: 21178, MsgBus: 8462 2024-11-21T23:19:19.189649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876179817704744:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:19.189977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028b2/r3tmp/tmpIC8d58/pdisk_1.dat 2024-11-21T23:19:20.051062Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:20.105758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:20.105842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:20.115713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21178, node 1 2024-11-21T23:19:20.360559Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:20.360590Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:20.360603Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:20.360729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8462 TClient is connected to server localhost:8462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:21.471768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:19:21.500734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:21.692161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:22.046058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:22.210578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:24.196191Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876179817704744:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:24.196305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:24.218917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876201292542812:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:24.219049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:24.730738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.786198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.856688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.909801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:24.979572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.027720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.129646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876205587510613:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.129750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.130104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876205587510618:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.134486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:25.146754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876205587510620:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:26.982863Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDYwMmYxZTYtMjM4YTM5ZTktMzI5ZTkzNWYtZTllZDE2ODg=, ActorId: [1:7439876209882478238:2466], ActorState: ExecuteState, TraceId: 01jd8gbyy02gywz436kznct0gz, Create QueryResponse for error on request, msg:
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 70ms during compilation Trying to start YDB, gRPC: 14867, MsgBus: 27215 2024-11-21T23:19:28.427730Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876218502962270:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:28.427763Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028b2/r3tmp/tmpY8xtnU/pdisk_1.dat 2024-11-21T23:19:28.875213Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:28.991685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:28.991781Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:29.003833Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14867, node 2 2024-11-21T23:19:29.223117Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:29.223139Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:29.223148Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:29.223259Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27215 TClient is connected to server localhost:27215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:30.387893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:30.416956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:30.520988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:30.749641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:30.867345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.434747Z node 2 :METADATA_PROVID ... :7762515]; 2024-11-21T23:19:33.434830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:33.483611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876239977800448:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:33.483690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:33.601203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:33.681645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:33.741987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:33.799660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:33.889427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.009743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:34.104108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876244272768250:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.104189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.104530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876244272768255:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:34.109042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:34.139893Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T23:19:34.140484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876244272768257:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } Trying to start YDB, gRPC: 14740, MsgBus: 27621 2024-11-21T23:19:36.996403Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876250562797552:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:36.996447Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028b2/r3tmp/tmpR8SIRv/pdisk_1.dat 2024-11-21T23:19:37.403745Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:37.404861Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:37.404942Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:37.417868Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14740, node 3 2024-11-21T23:19:37.618608Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:37.618630Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:37.618637Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:37.618757Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27621 TClient is connected to server localhost:27621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:38.480195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:38.492653Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:38.508083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:38.614648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:38.842989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:38.917796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.702546Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876272037635676:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:41.702649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:41.748315Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.806685Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.890997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:41.940972Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:42.000447Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876250562797552:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:42.004456Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:42.025143Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:42.130534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:42.212357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876276332603481:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:42.212512Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:42.212916Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876276332603486:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:42.216962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:42.228904Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876276332603488:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } query_phases { duration_us: 7011 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3609 affected_shards: 1 } query_phases { duration_us: 17640 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 2382 affected_shards: 2 } compilation { duration_us: 258979 cpu_time_us: 247271 } process_cpu_time_us: 740 total_duration_us: 294093 total_cpu_time_us: 254002 >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> TColumnShardTestSchema::DropWriteRace [GOOD] >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpExplain::PureExpr >> KqpQuery::DictJoin [GOOD] >> KqpQuery::QueryFromSqs [GOOD] |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |87.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log} |87.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2024-11-21T23:19:45.335976Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:19:45.565753Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:19:45.613683Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:19:45.613794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:19:45.614101Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:19:45.640807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:45.641067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:45.641429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:45.641543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:45.641653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:19:45.641776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:19:45.641894Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:19:45.642010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:19:45.642112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:19:45.642237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:19:45.642368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:19:45.642497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:19:45.687470Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:19:45.691655Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:19:45.691925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:19:45.692090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:19:45.692291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:45.692468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:45.692562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:45.692616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:19:45.692746Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:19:45.692820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:45.692865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:45.692912Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:19:45.693097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:45.693158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:45.693198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:45.693225Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:19:45.693309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:19:45.693377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:45.693424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:45.693450Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:19:45.693509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:45.693550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:45.693580Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:19:45.693630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:45.693674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:45.693702Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:19:45.693870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2024-11-21T23:19:45.693953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2024-11-21T23:19:45.694058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2024-11-21T23:19:45.694153Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2024-11-21T23:19:45.694418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:45.694475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:45.694506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:19:45.694696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:19:45.694739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:19:45.694779Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:19:45.694928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:19:45.694971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:19:45.694999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:19:45.695197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:19:45.695251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:19:45.695288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:19:45.695397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
9:46.322109Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=88923004754528;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=89197881133632;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T23:19:46.322235Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=88923004754528;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=89197881133632;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=tx_controller.cpp:371;event=start;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;; 2024-11-21T23:19:46.322297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=88923004754528;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=89197881133632;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:97:2132]; 2024-11-21T23:19:46.322386Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;this=88923004754528;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:0;;int_this=89197881133632;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2024-11-21T23:19:46.322791Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000001 at tablet 9437184, mediator 0 2024-11-21T23:19:46.322873Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2024-11-21T23:19:46.323047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:19:46.323341Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2024-11-21T23:19:46.330277Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T23:19:46.330495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=tables_manager.cpp:259;method=RegisterTable;path_id=1; 2024-11-21T23:19:46.330550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine.h:339;event=RegisterTable;path_id=1; 2024-11-21T23:19:46.346978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T23:19:46.378333Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2024-11-21T23:19:46.378617Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6120;columns=10; 2024-11-21T23:19:46.383300Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvWrite;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:19:46.383397Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvWrite;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:19:46.383457Z node 1 :TX_COLUMNSHARD DEBUG: Write (blob) 6120 bytes into pathId 1 {object=write_monitor;count=1;size=6120} at tablet 9437184 2024-11-21T23:19:46.402336Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];fline=actor.cpp:22;event=flush_writing;size=6120;count=1; 2024-11-21T23:19:46.407038Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 at tablet 9437184 2024-11-21T23:19:46.407529Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2024-11-21T23:19:46.425237Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2024-11-21T23:19:46.425371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:19:46.426022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=102;this=88923005047520;method=TTxController::StartProposeOnExecute;tx_info=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:311;event=start; 2024-11-21T23:19:46.426770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=102;this=88923005047520;method=TTxController::StartProposeOnExecute;tx_info=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;fline=tx_controller.cpp:340;event=registered; 2024-11-21T23:19:46.450363Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;this=88923005047520;op_tx=102:TX_KIND_COMMIT;min=1732231186472;max=1732231216472;plan=0;src=[1:97:2132];cookie=0;int_op_tx=102:TX_KIND_COMMIT;min=1732231186472;max=1732231216472;plan=0;src=[1:97:2132];cookie=0;int_this=88991723008944;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T23:19:46.450504Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;this=88923005047520;op_tx=102:TX_KIND_COMMIT;min=1732231186472;max=1732231216472;plan=0;src=[1:97:2132];cookie=0;int_op_tx=102:TX_KIND_COMMIT;min=1732231186472;max=1732231216472;plan=0;src=[1:97:2132];cookie=0;int_this=88991723008944;method=TTxController::FinishProposeOnComplete;tx_id=102;fline=tx_controller.cpp:371;event=start;tx_info=102:TX_KIND_COMMIT;min=1732231186472;max=1732231216472;plan=0;src=[1:97:2132];cookie=0; 2024-11-21T23:19:46.450578Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;request_tx=102:TX_KIND_COMMIT;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=0;this=88923005047520;op_tx=102:TX_KIND_COMMIT;min=1732231186472;max=1732231216472;plan=0;src=[1:97:2132];cookie=0;int_op_tx=102:TX_KIND_COMMIT;min=1732231186472;max=1732231216472;plan=0;src=[1:97:2132];cookie=0;int_this=88991723008944;method=TTxController::FinishProposeOnComplete;tx_id=102;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=102; 2024-11-21T23:19:46.451081Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=103;this=88923005048192;method=TTxController::StartProposeOnExecute;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;fline=tx_controller.cpp:311;event=start; 2024-11-21T23:19:46.451615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=103;this=88923005048192;method=TTxController::StartProposeOnExecute;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;fline=tx_controller.cpp:340;event=registered; 2024-11-21T23:19:46.471035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=88923005048192;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=89197881261888;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T23:19:46.471175Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=88923005048192;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=89197881261888;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=tx_controller.cpp:371;event=start;tx_info=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;; 2024-11-21T23:19:46.471252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=88923005048192;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=89197881261888;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:97:2132]; 2024-11-21T23:19:46.471363Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;this=88923005048192;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:97:2132];cookie=00:2;;int_this=89197881261888;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2024-11-21T23:19:46.471754Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000002 at tablet 9437184, mediator 0 2024-11-21T23:19:46.471832Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at 
tablet 9437184 2024-11-21T23:19:46.471969Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:19:46.472164Z node 1 :TX_COLUMNSHARD DEBUG: DropTable for pathId: 1 at tablet 9437184 2024-11-21T23:19:46.489335Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2024-11-21T23:19:46.489533Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:19:46.489853Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000003 at tablet 9437184, mediator 0 2024-11-21T23:19:46.489937Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[7] execute at tablet 9437184 2024-11-21T23:19:46.490077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:19:46.490379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=execute;fline=insert_table.cpp:50;event=abort_insertion;path_id=1;blob_range={ Blob: DS:0:[9437184:2:1:3:0:7080:0] Offset: 0 Size: 7080 }; 2024-11-21T23:19:46.503188Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[7] complete at tablet 9437184 2024-11-21T23:19:46.503393Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForBadRequestExecute [GOOD] Test command err: Trying to start YDB, gRPC: 8264, MsgBus: 9413 2024-11-21T23:19:21.662298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876188520596774:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:21.662525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a6/r3tmp/tmpT1HsOv/pdisk_1.dat 2024-11-21T23:19:22.096855Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:22.136608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:22.136725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:22.137869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8264, node 1 2024-11-21T23:19:22.239718Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:22.239759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:22.239765Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:22.239853Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9413 TClient is connected to server localhost:9413 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:23.245788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:23.276302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:23.432615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:23.638904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:23.759540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:26.658569Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876188520596774:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:26.658649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:27.554836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876214290402108:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:27.585910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:27.933681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.991835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.053078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.122706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.178684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.284407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:28.395773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876218585369907:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:28.395863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:28.396287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876218585369912:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:28.400841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:28.419990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876218585369914:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:31.319001Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231171272, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 19534, MsgBus: 5572 2024-11-21T23:19:32.419949Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876234953358109:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a6/r3tmp/tmpB2wf5O/pdisk_1.dat 2024-11-21T23:19:32.522030Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:32.599914Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:32.651269Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:32.651361Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:32.653386Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19534, node 2 2024-11-21T23:19:32.718827Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:32.718851Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:32.718860Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:32.718964Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5572 TClient is connected to server localhost:5572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:33.195755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.207261Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:33.225020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:33.352516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.545466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.634441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:36.217843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:74 ... :19:36.218052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.251747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.341377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.394510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.452920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.528359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.572829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.622114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876252133229322:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.622199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.626113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876252133229327:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.630058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:36.640309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876252133229329:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:37.405436Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876234953358109:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:37.405512Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:38.132025Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231178097, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 15493, MsgBus: 62013 2024-11-21T23:19:39.091013Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876265424112854:2126];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:39.091298Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a6/r3tmp/tmpaZFZj2/pdisk_1.dat 2024-11-21T23:19:39.246458Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:39.276338Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:39.276437Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:39.277696Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15493, node 3 2024-11-21T23:19:39.345398Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:39.345424Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:39.345433Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:39.345534Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62013 TClient is connected to server localhost:62013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:39.966579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
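Every fresh database in these runs emits the same sequence: "Resource pool default not found or you don't have access permissions", an ESchemeOpCreateResourcePool suboperation, and a TPoolCreatorActor "Scheduled retry ... completed, doublechecking". Judging only from this log, it reads as first-use provisioning of the default pool rather than a test failure. Below is a hypothetical pre-filter that hides these recurring start-up messages when scanning a captured log for real problems; treating them as benign is an assumption based on these excerpts, not on YDB documentation.

```python
import sys

# Messages that this log shows on every fresh database start.  Treating them as
# benign is an assumption drawn from the excerpts above.  Filtering is line-level
# and therefore coarse when several records share one physical line.
BENIGN_SNIPPETS = (
    "Resource pool default not found or you don't have access permissions",
    "completed, doublechecking",
    "Table profiles were not loaded",
    "distributable config is empty, broken or outdated",
    "will try to initialize from file: (empty maybe)",
    "failed to initialize from file: (empty maybe)",
    "got bad distributable configuration",
)

def interesting(lines):
    """Yield only lines that do not contain any of the known start-up messages."""
    for line in lines:
        if not any(snippet in line for snippet in BENIGN_SNIPPETS):
            yield line

if __name__ == "__main__":
    for line in interesting(open(sys.argv[1], encoding="utf-8", errors="replace")):
        if "ERROR:" in line or "WARN:" in line:
            print(line.rstrip())
```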
2024-11-21T23:19:39.974827Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:39.994298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:40.100530Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:40.340349Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:40.461847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.423711Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876282603983658:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.423798Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.477445Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.533968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.579646Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.631914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.691268Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.754759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.856364Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876282603984156:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.856461Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.856706Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876282603984162:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.861841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:43.872430Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876282603984164:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:44.090483Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876265424112854:2126];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:44.090555Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:45.510223Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876291193919105:2468], status: GENERIC_ERROR, issues:
:2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... 2024-11-21T23:19:45.516525Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDBmNmQ0ZmUtNGRlMDhlMGYtZDAzN2Y4NTEtODk1NDQ2YWI=, ActorId: [3:7439876291193919090:2463], ActorState: ExecuteState, TraceId: 01jd8gch2t2v9pyd2bt3yc9bj9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input... >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpParams::RowsList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD] Test command err: Trying to start YDB, gRPC: 16820, MsgBus: 21170 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00289e/r3tmp/tmp9yy6BK/pdisk_1.dat 2024-11-21T23:19:23.817845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876196046282619:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:23.818979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:24.481386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:24.481498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:24.486917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 1 2024-11-21T23:19:24.523090Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:24.699575Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:19:24.799389Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:19:24.800400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:24.800420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:24.800429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:24.800528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21170 TClient is connected to server localhost:21170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:26.142122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
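The compile error above reports its position as ":<line>:<column>:" relative to the query text (here 'INCORRECT_STMT' at line 2, column 12), followed by the message, and the session then replies with GENERIC_ERROR carrying the same issue. A small, hypothetical parser for issue strings of this shape; the grammar is read off these excerpts and is not guaranteed to cover every issue format.

```python
import re

# Issue strings in this log look like
#   ":2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input..."
# i.e. ":<line>:<column>: <severity>: <message>".  The exact grammar is an assumption.
ISSUE_RE = re.compile(r":(\d+):(\d+): (\w+): (.*)")

def parse_issue(issue: str):
    """Return (line, column, severity, message) or None if the string does not match."""
    m = ISSUE_RE.match(issue.strip())
    if not m:
        return None
    line, col, severity, message = m.groups()
    return int(line), int(col), severity, message

if __name__ == "__main__":
    sample = ":2:12: Error: Unexpected token 'INCORRECT_STMT' : cannot match to any predicted input..."
    print(parse_issue(sample))
```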
2024-11-21T23:19:26.252303Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:26.265498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:30.045391Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876196046282619:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:30.447716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:31.502067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876230406025804:2604], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.502181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.504596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876230406025846:2608], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.513827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:19:31.535389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876230406025848:2609], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } Trying to start YDB, gRPC: 16820, MsgBus: 21171 2024-11-21T23:19:34.123739Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876243290922862:2183];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:34.123798Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00289e/r3tmp/tmpiOxF12/pdisk_1.dat 2024-11-21T23:19:34.468284Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16820, node 2 2024-11-21T23:19:34.579125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:34.579227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:34.598978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:34.719025Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:34.719050Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:34.719057Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:34.719152Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21171 TClient is connected to server localhost:21171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:35.467825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:35.496843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:38.782875Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876260470796873:2603], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:38.783031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:38.784510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876260470796887:2606], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:38.790039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:19:38.805827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876260470796893:2607], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:19:39.128225Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876243290922862:2183];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:39.131000Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 200837 table_access { name: "/Root/ManyShardsTable" reads { rows: 1100 bytes: 8800 } partitions_count: 100 } cpu_time_us: 128021 affected_shards: 100 } compilation { duration_us: 139538 cpu_time_us: 131506 } process_cpu_time_us: 235 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"ManyShardsTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"DurationUs\":{\"Count\":4,\"Sum\":567000,\"Max\":150000,\"Min\":134000},\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":583,\"Max\":155,\"Min\":141},\"ActiveMessageMs\":{\"Count\":4,\"Max\":155,\"Min\":7},\"FirstMessageMs\":{\"Count\":4,\"Sum\":28,\"Max\":7,\"Min\":7},\"Bytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":555000,\"Max\":148000,\"Min\":134000}},\"Name\":\"4\",\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":583,\"Max\":155,\"Min\":141},\"Chunks\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\" ... 
,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":580,\"Max\":152,\"Min\":141},\"ActiveMessageMs\":{\"Count\":4,\"Max\":152,\"Min\":6},\"FirstMessageMs\":{\"Count\":4,\"Sum\":27,\"Max\":7,\"Min\":6},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":553000,\"Max\":146000,\"Min\":134000}},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":580,\"Max\":152,\"Min\":141},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":580,\"Max\":152,\"Min\":141},\"FirstMessageMs\":{\"Count\":4,\"Sum\":27,\"Max\":7,\"Min\":6},\"ActiveMessageMs\":{\"Count\":4,\"Max\":152,\"Min\":6},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768},\"PauseMessageMs\":{\"Count\":4,\"Sum\":5,\"Max\":2,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":553000,\"Max\":146000,\"Min\":134000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":528939,\"Max\":143442,\"Min\":123594},\"WaitPeriods\":{\"Count\":4,\"Sum\":86,\"Max\":22,\"Min\":20}}}]}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":157,\"Max\":157,\"Min\":157},\"ActiveMessageMs\":{\"Count\":1,\"Max\":157,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":7691,\"Max\":7691,\"Min\":7691},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":149000,\"Max\":149000,\"Min\":149000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":157,\"Max\":157,\"Min\":157},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":155,\"Max\":155,\"Min\":155},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveMessageMs\":{\"Count\":1,\"Max\":157,\"Min\":8},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":149000,\"Max\":149000,\"Min\":149000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":130243,\"Max\":130243,\"Min\":130243},\"WaitPeriods\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20},\"WaitMessageMs\":{\"Count\":1,\"Max\":155,\"Min\":1}}}],\"DurationUs\":{\"Count\":1,\"Sum\":149000,\"Max\":149000,\"Min\":149000},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576},\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7691,\"Max\":7691,\"Min\":7691},\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":149000,\"BaseTimeMs\":1732231179095,\"WaitInputTimeUs\":{\"Count\":1,\"Sum\":128373,\"Max\":128373,\"Min\":128373},\"OutputBytes\":{\"Count\":1,\"Sum\":7691,\"Max\":7691,\"Min\":7691},\"CpuTimeUs\":{\"Count\":1,\"Sum\":27868,\"Max\":27868,\"Min\":27868},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":26,\"Max\":26,\"Min\":26},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":156,\"Max\":156,\"Min\"
:156},\"ActiveMessageMs\":{\"Count\":1,\"Max\":156,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":148000,\"Max\":148000,\"Min\":148000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":156,\"Max\":156,\"Min\":156},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":155,\"Max\":155,\"Min\":155},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveMessageMs\":{\"Count\":1,\"Max\":156,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":148000,\"Max\":148000,\"Min\":148000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":32470,\"Max\":32470,\"Min\":32470},\"WaitPeriods\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"WaitMessageMs\":{\"Count\":1,\"Max\":155,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":139538,\"CpuTimeUs\":131506},\"ProcessCpuTimeUs\":235,\"TotalDurationUs\":477829,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":129595},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'353) \'(\'\"_id\" \'\"fda0833b-f116dd44-94808094-8a9a7e4e\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'365) \'(\'\"_id\" \'\"1a81868b-1fdc6142-501672f3-53223d3e\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 477829 total_cpu_time_us: 259762 Trying to start YDB, gRPC: 16820, MsgBus: 21172 2024-11-21T23:19:40.589318Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876269060726719:2255];send_to=[0:7307199536658146131:7762515]; 
2024-11-21T23:19:40.589360Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00289e/r3tmp/tmpSj1xBu/pdisk_1.dat 2024-11-21T23:19:40.805998Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:40.855344Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:40.855482Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:40.857246Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 3 2024-11-21T23:19:41.038299Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:41.038319Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:41.038325Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:41.038686Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21172 TClient is connected to server localhost:21172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:41.735733Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.744041Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:41.773540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:45.302602Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876290535568209:2607], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:45.302729Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:45.303134Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876290535568222:2610], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:45.307468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2024-11-21T23:19:45.344417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876290535568225:2611], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2024-11-21T23:19:45.621264Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876269060726719:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:45.621317Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 14272, MsgBus: 2437 2024-11-21T23:19:20.081710Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876181151339694:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:20.082057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a9/r3tmp/tmpKa0ljC/pdisk_1.dat 2024-11-21T23:19:20.893594Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:20.937601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:20.937681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:20.940892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14272, node 1 2024-11-21T23:19:21.142897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:21.142919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:21.142925Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:21.143005Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2437 TClient is connected to server localhost:2437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:22.362319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
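In each run the TTableExistsActor first logs an "undelivered" warning from table_exists.cpp:54 and, roughly five seconds later in these excerpts, a timeout error from table_exists.cpp:59 carrying the same self_id. A hypothetical sketch that pairs the two records by self_id to measure that wait; pairing on self_id alone is an assumption drawn from these excerpts.

```python
import re
import sys
from datetime import datetime

TS_RE = re.compile(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)Z ")
EVENT_RE = re.compile(r"event=(undelivered|timeout);self_id=\[([^\]]+)\]")

def waits(text):
    """Pair TTableExistsActor 'undelivered' and 'timeout' records by self_id.

    The text is split on timestamps so that each chunk holds a single record,
    even when several records share one physical line of the captured log.
    """
    parts = TS_RE.split(text)            # [prefix, ts1, body1, ts2, body2, ...]
    first, out = {}, []
    for ts, body in zip(parts[1::2], parts[2::2]):
        m = EVENT_RE.search(body)
        if not m:
            continue
        event, self_id = m.groups()
        t = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%f")
        if event == "undelivered":
            first.setdefault(self_id, t)
        elif self_id in first:
            out.append((self_id, (t - first.pop(self_id)).total_seconds()))
    return out

if __name__ == "__main__":
    for self_id, seconds in waits(open(sys.argv[1], encoding="utf-8", errors="replace").read()):
        print(f"[{self_id}] {seconds:.3f} s between undelivered and timeout")
```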
2024-11-21T23:19:22.391049Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:22.407816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:22.613379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:22.811268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:19:22.890739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:24.966471Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876181151339694:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:24.966544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:25.096639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876206921145034:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.096759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:25.679583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.730938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:25.840792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.052700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.122029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.264354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:26.460738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876211216112849:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:26.460840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:26.461224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876211216112854:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:26.466124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:26.491606Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:26.492598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876211216112856:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:28.660515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62493, MsgBus: 61403 2024-11-21T23:19:31.826996Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876230483380807:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:31.827049Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a9/r3tmp/tmpbOeH03/pdisk_1.dat 2024-11-21T23:19:32.256761Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:32.266881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:32.266965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:32.269144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62493, node 2 2024-11-21T23:19:32.602966Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:32.602991Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:32.603006Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:32.603106Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61403 TClient is connected to server localhost:61403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:19:33.752554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:33.772208Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:33.788975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:33.962098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:34.306413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 7205759 ... :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876256253186795:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.636609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:37.653616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876256253186797:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 (Uint32 '1)) (let $5 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) '((KqlKeyExc $4 (String '"Name")) (KqlKeyInc $4)))) (let $6 (OptionalType (DataType 'String))) (let $7 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $6) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $6))) (let $8 '('('"_logical_id" '643) '('"_id" '"bb58d35e-93958620-a360eff4-1166dd8") '('"_wide_channels" $7))) (let $9 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $5)) (lambda '($13) (block '( (let $14 (lambda '($15) (Member $15 '"Amount") (Member $15 '"Comment") (Member $15 '"Group") (Member $15 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $13) $3) $14))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '"0"))) (let $11 (DqPhyStage '($10) (lambda '($16) (FromFlow (NarrowMap (Take (ToFlow $16) $3) (lambda '($17 $18 $19 $20) (AsStruct '('"Amount" $17) '('"Comment" $18) '('"Group" $19) '('"Name" $20)))))) '('('"_logical_id" '656) '('"_id" '"856745b1-9a6824ab-a18142c3-ed5d16ea")))) (let $12 (DqCnResult (TDqOutput $11 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($9 $11) '($12) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $7) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","ReadLimit":"1001","Inputs":[],"E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","ReadLimit":"1001","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 26106, MsgBus: 10452 2024-11-21T23:19:39.997089Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876264282654232:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:39.997149Z node 3 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a9/r3tmp/tmpT808fv/pdisk_1.dat 2024-11-21T23:19:40.112279Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:40.126361Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:40.126726Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:40.128229Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26106, node 3 2024-11-21T23:19:40.210587Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:40.210609Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:40.210618Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:40.210739Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10452 TClient is connected to server localhost:10452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:40.760692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:40.766956Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:40.778032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:40.856673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.045759Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
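The KqpQuery::QueryFromSqs output a few lines above ends with a KQP AST and a plan whose "tables" array records, per table, the columns read, the lookup and scan keys, and the 1001-row limit pushed down into the TableRangeScan. A hypothetical sketch that flattens that section into one tuple per read; the field names are taken from that plan and may be absent in others.

```python
import json

def table_reads(plan_json: str):
    """Flatten the "tables" section of a KQP plan into (table, type, keys, columns, limit) tuples.

    Field names are read off the QueryFromSqs plan printed above.
    """
    doc = json.loads(plan_json)
    for table in doc.get("tables", []):
        for read in table.get("reads", []):
            yield (
                table.get("name"),
                read.get("type"),
                read.get("lookup_by", []) + read.get("scan_by", []),
                read.get("columns", []),
                read.get("limit"),
            )

if __name__ == "__main__":
    # Stand-in document with the same shape as the QueryFromSqs plan above.
    sample = json.dumps({
        "tables": [{
            "name": "/Root/Test",
            "reads": [{"lookup_by": ["Group (1)"], "scan_by": ["Name (Name, +∞)"],
                       "columns": ["Amount", "Comment", "Group", "Name"],
                       "limit": "1001", "type": "Scan"}],
        }],
    })
    for row in table_reads(sample):
        print(row)
```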
2024-11-21T23:19:41.136344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:43.644758Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876281462525107:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.644896Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.707997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.774225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.842599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.901476Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.949834Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.043489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.156944Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876285757492908:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.157076Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.157411Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876285757492913:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.162326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:44.177602Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876285757492915:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:44.998508Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876264282654232:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:44.998595Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:45.693147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 21269, MsgBus: 25477 2024-11-21T23:19:21.621269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876188044805791:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:21.621421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a7/r3tmp/tmpItEHRL/pdisk_1.dat 2024-11-21T23:19:22.304772Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:22.309281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:22.309389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:22.312331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21269, node 1 2024-11-21T23:19:22.598889Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:22.598911Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:22.598918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:22.598999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25477 TClient is connected to server localhost:25477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
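Each test case in this output opens with a "Trying to start YDB, gRPC: <port>, MsgBus: <port>" banner, and reported cases are additionally headed by a "------- [TM] ... >> <Test::Name> [STATUS]" line. A hypothetical sketch that splits a captured log on those markers and counts ERROR records per section; that these markers always delimit exactly one case is an assumption based on these excerpts.

```python
import re
import sys

# Section boundaries observed in this output:
#   "------- [TM] {...} <suite> >> <Test::Name> [GOOD]"   -- per-test result header
#   "Trying to start YDB, gRPC: <port>, MsgBus: <port>"   -- start of a test's own log
HEADER_RE = re.compile(
    r"(------- \[TM\] .*? >> \S+ \[\w+\]|Trying to start YDB, gRPC: \d+, MsgBus: \d+)"
)

def sections(text):
    """Yield (header, body) pairs, one per detected section of the combined log."""
    parts = HEADER_RE.split(text)        # [prefix, header1, body1, header2, body2, ...]
    for header, body in zip(parts[1::2], parts[2::2]):
        yield header, body

if __name__ == "__main__":
    text = open(sys.argv[1], encoding="utf-8", errors="replace").read()
    for header, body in sections(text):
        errors = body.count("ERROR:")
        print(f"{errors:4d} ERROR records | {header[:100]}")
```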
2024-11-21T23:19:23.555539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:23.607175Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:23.626441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:23.927025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:24.170827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:24.306561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:26.615298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876188044805791:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:26.615357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:28.200830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876218109578432:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:28.200947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:28.945022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.011689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.093355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.152689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.220501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.316889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:29.492060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876222404546252:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.492143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.492359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876222404546257:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:29.496445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:29.524797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876222404546259:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:31.163590Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876230994481181:2475], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2024-11-21T23:19:31.167748Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmVmZmZkNTYtOTk0ZWZhMjQtNDc2YmM5YTktOGE3ODk0Mjg=, ActorId: [1:7439876230994481172:2470], ActorState: ExecuteState, TraceId: 01jd8gc31ydpt8rm0n764g7d0t, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2024-11-21T23:19:31.234446Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876230994481196:2479], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2024-11-21T23:19:31.240531Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmVmZmZkNTYtOTk0ZWZhMjQtNDc2YmM5YTktOGE3ODk0Mjg=, ActorId: [1:7439876230994481172:2470], ActorState: ExecuteState, TraceId: 01jd8gc346bn1a587262q5wr1a, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2024-11-21T23:19:31.289074Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876230994481224:2483], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2024-11-21T23:19:31.295617Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmVmZmZkNTYtOTk0ZWZhMjQtNDc2YmM5YTktOGE3ODk0Mjg=, ActorId: [1:7439876230994481172:2470], ActorState: ExecuteState, TraceId: 01jd8gc35jeh1sztkd23bsaem4, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 Trying to start YDB, gRPC: 19810, MsgBus: 32081 2024-11-21T23:19:32.286696Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876237011359785:2052];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a7/r3tmp/tmpyFYBXE/pdisk_1.dat 2024-11-21T23:19:32.342660Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:32.501090Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19810, node 2 2024-11-21T23:19:32.620580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:32.620734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:32.697773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:32.767112Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:32.767146Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:32.767154Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:32.767270Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32081 TClient is connected to server localhost:32081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersio ... 2024-11-21T23:19:33.603644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.703856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:33.909165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:34.029485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:36.786604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876254191230683:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.786724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:36.840681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.883827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.926795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:36.974868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.013256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.098159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.155020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876258486198476:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.155104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.155505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876258486198481:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:37.159238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:37.168537Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:37.169136Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876258486198483:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:37.282495Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876237011359785:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:37.282560Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61171, MsgBus: 15696 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028a7/r3tmp/tmp3F9GzE/pdisk_1.dat 2024-11-21T23:19:40.018643Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:40.034216Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:40.067548Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:40.067675Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:40.069073Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61171, node 3 2024-11-21T23:19:40.251217Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:40.251249Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:40.251257Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:40.251379Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15696 TClient is connected to server localhost:15696 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:40.845951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:40.853180Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:40.868196Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:40.960881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.144050Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.240035Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:44.103616Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876285463664528:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.103699Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.171193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.263287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.302623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.354684Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.394101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.453484Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.519636Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876285463665031:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.519744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.519923Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876285463665036:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.524543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:44.541253Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876285463665038:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> KqpLimits::DatashardProgramSize [GOOD] >> KqpLimits::ComputeNodeMemoryLimit >> KqpLimits::BigParameter [GOOD] >> KqpLimits::CancelAfterRoTx >> TSchemeShardAuditSettings::CreateSubdomain |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> KqpParams::MissingParameter [GOOD] >> KqpParams::ParameterTypes >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> TSchemeShardAuditSettings::CreateExtSubdomain >> KqpQuery::UpdateWhereInSubquery [GOOD] |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> BasicUsage::SimpleHandlers [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin-ColumnStore >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::FewEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:19:50.408852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:19:50.408946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:19:50.408980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:19:50.409031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:19:50.409079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:19:50.409228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:19:50.409307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:19:50.409586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:19:50.490960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:19:50.491017Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:50.503770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:19:50.508259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:19:50.508475Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:19:50.515680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:19:50.516312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:19:50.516913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:19:50.517247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:19:50.521973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:19:50.523319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:19:50.523375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:19:50.523610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:19:50.523655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:19:50.523692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:19:50.523794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:19:50.532777Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:19:50.671360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:19:50.671651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:50.671880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:19:50.672116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:19:50.672178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:50.674856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:19:50.675039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:19:50.675241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:50.675294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 
2024-11-21T23:19:50.675341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:19:50.675374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:19:50.677856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:50.677911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:19:50.677951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:19:50.683395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:50.683479Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:50.683523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:19:50.683572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:19:50.687710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:19:50.691033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:19:50.691246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:19:50.692283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:19:50.692445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:19:50.692509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:19:50.692842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:19:50.692895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:19:50.693075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:19:50.693174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:19:50.695539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:19:50.695597Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:19:50.695794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:19:50.695831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:19:50.696154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:50.696203Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:19:50.696292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:19:50.696330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:19:50.696372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:19:50.696409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:19:50.696458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:19:50.696485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:19:50.696559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:19:50.696628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:19:50.696659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:19:50.698568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:19:50.698682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:19:50.698720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:19:50.698775Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:19:50.698826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:19:50.698928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
etId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 112 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:19:51.165715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:51.165805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:51.165836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:19:51.165867Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2024-11-21T23:19:51.165899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:19:51.166948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:51.167016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:51.167039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:19:51.167079Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2024-11-21T23:19:51.167123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T23:19:51.167251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T23:19:51.171309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2024-11-21T23:19:51.171440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2024-11-21T23:19:51.172807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:19:51.172939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:19:51.172995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2024-11-21T23:19:51.173056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit 
path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:19:51.173104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T23:19:51.173216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 130 2024-11-21T23:19:51.173403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:19:51.173464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T23:19:51.174433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T23:19:51.174746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2024-11-21T23:19:51.176584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:19:51.176621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:19:51.176769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T23:19:51.176876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:19:51.176927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2024-11-21T23:19:51.176965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2024-11-21T23:19:51.177256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.177304Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2024-11-21T23:19:51.177389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2024-11-21T23:19:51.177425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T23:19:51.177467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2024-11-21T23:19:51.177503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T23:19:51.177531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2024-11-21T23:19:51.177560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2024-11-21T23:19:51.177629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T23:19:51.177659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2024-11-21T23:19:51.177685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 27 
2024-11-21T23:19:51.177704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2024-11-21T23:19:51.178384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:51.178509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:51.178546Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:19:51.178579Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2024-11-21T23:19:51.178611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:19:51.179923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:51.180021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:51.180054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:19:51.180095Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T23:19:51.180129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T23:19:51.180208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2024-11-21T23:19:51.183096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:19:51.183404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T23:19:51.183498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T23:19:51.183989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:19:51.184038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T23:19:51.184095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:19:51.186791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T23:19:51.189100Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T23:19:51.189222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:19:51.189282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2024-11-21T23:19:51.189617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2024-11-21T23:19:51.189647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2024-11-21T23:19:51.190153Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2024-11-21T23:19:51.190258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2024-11-21T23:19:51.190304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:652:2644] TestWaitNotification: OK eventTxId 112 >> KqpFlipJoin::Inner_1 >> KqpQuery::QueryCache [GOOD] >> KqpQuery::QueryCacheInvalidate >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateWhereInSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 2571, MsgBus: 7718 2024-11-21T23:19:23.579117Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876196374519399:2092];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:23.579673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00289f/r3tmp/tmpJtHK9c/pdisk_1.dat 2024-11-21T23:19:24.167283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:24.169209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:24.175716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:24.244834Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2571, node 1 2024-11-21T23:19:24.515039Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:24.515064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:24.515070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:24.515770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7718 TClient is connected to server localhost:7718 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:25.840783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:25.878546Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:25.904900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:26.217964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:26.984270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.125259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:28.578754Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876196374519399:2092];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:28.578826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:30.606301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876226439292151:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:30.618647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:30.980642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:31.035359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:31.097057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:31.132998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:31.189270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:31.276477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:31.374578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876230734259962:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.374639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.375113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876230734259967:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:31.387369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:31.403979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876230734259969:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:33.901638Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439876239324194979:2480], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jd8gc58afsns60wdss6w70e4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzM0MmNjY2ItZWJkNDc4ODItZjFiZDc2N2ItMWMyZmNlMzA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. }. 2024-11-21T23:19:33.921656Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439876239324194980:2481], TxId: 281474976710671, task: 2. Ctx: { TraceId : 01jd8gc58afsns60wdss6w70e4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzM0MmNjY2ItZWJkNDc4ODItZjFiZDc2N2ItMWMyZmNlMzA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439876239324194959:2470], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:33.946579Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzM0MmNjY2ItZWJkNDc4ODItZjFiZDc2N2ItMWMyZmNlMzA=, ActorId: [1:7439876239324194903:2470], ActorState: ExecuteState, TraceId: 01jd8gc58afsns60wdss6w70e4, Create QueryResponse for error on request, msg:
: Error: yql/essentials/minikql/computation/mkql_value_builder.cpp:52: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 30198, MsgBus: 28536 2024-11-21T23:19:35.135478Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876246290551587:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:35.281200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00289f/r3tmp/tmpLVFzX8/pdisk_1.dat 2024-11-21T23:19:35.455641Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:35.461934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:35.462040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:35.465614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30198, node 2 2024-11-21T23:19:35.721065Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:35.721100Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:35.721109Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:35.721208Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28536 TClient is connected to server localhost:28536 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp ... ... 2024-11-21T23:19:36.552228Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:36.566620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:36.705636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:19:37.128354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:37.298167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:40.093247Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876246290551587:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:40.093327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:40.220968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876267765389605:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:40.221058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:40.288336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.333444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.390517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.469759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.555310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.609324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:40.682746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876267765390107:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:40.682910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:40.686746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876267765390113:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:40.693471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:40.704123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876267765390115:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 14236, MsgBus: 3555 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00289f/r3tmp/tmpVSNmEC/pdisk_1.dat 2024-11-21T23:19:43.529979Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:43.619453Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14236, node 3 2024-11-21T23:19:43.695034Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:43.695123Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:43.730775Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:43.880104Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:43.880133Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:43.880141Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:43.880260Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3555 TClient is connected to server localhost:3555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:44.445839Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:44.455684Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:44.463935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:44.551849Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:44.725002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:44.875519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.934206Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876299112317923:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:47.934281Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:47.976718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.018178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.088968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.124655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.185122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.383159Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.464692Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876303407285721:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.464774Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.464977Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876303407285726:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.468673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:48.483996Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876303407285728:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckCacheByAst >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> KqpAnalyze::AnalyzeTable+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:19:51.401131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:19:51.401224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:19:51.401263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:19:51.401322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:19:51.401369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:19:51.401417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:19:51.401482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:19:51.401851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:19:51.484480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:19:51.484547Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:51.497603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:19:51.500931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:19:51.501123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:19:51.506828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:19:51.507499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:19:51.508230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:19:51.508600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:19:51.513170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:19:51.514448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:19:51.514497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:19:51.514728Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:19:51.514778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:19:51.514815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:19:51.514907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.524439Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:19:51.657314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:19:51.657622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.657855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:19:51.658102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:19:51.658168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.664853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:19:51.665023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:19:51.665236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.665315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:19:51.665373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:19:51.665413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:19:51.671221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.671307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:19:51.671348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:19:51.674208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.674298Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.674346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, 
operationId 1:0, at tablet 72057594046678944 2024-11-21T23:19:51.674440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:19:51.679702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:19:51.682332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:19:51.682574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:19:51.683626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:19:51.683813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:19:51.683865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:19:51.684200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:19:51.684257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:19:51.684444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:19:51.684550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:19:51.687311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:19:51.687371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:19:51.687629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:19:51.687662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:19:51.687934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:19:51.687997Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:19:51.688100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:19:51.688140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:19:51.688185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, 
TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:19:51.688242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:19:51.688298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:19:51.688329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:19:51.688430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:19:51.688475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:19:51.688506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:19:51.690125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:19:51.690217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:19:51.690243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:19:51.690282Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:19:51.690323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:19:51.690441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
94046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:52.087476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:52.087492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:19:52.087513Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2024-11-21T23:19:52.087531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:19:52.088087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:52.088142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:52.088159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:19:52.088175Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2024-11-21T23:19:52.088191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T23:19:52.088234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T23:19:52.090348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2024-11-21T23:19:52.090531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2024-11-21T23:19:52.091483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:19:52.091652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:19:52.091691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2024-11-21T23:19:52.091753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:19:52.091790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: 
[OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T23:19:52.091824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 134 2024-11-21T23:19:52.095236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T23:19:52.095734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T23:19:52.097399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T23:19:52.097467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:19:52.097581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 134 -> 135 2024-11-21T23:19:52.097737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:19:52.097802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 FAKE_COORDINATOR: Erasing txId 112 2024-11-21T23:19:52.099571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:19:52.099627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:19:52.099774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2024-11-21T23:19:52.099880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:19:52.099925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2024-11-21T23:19:52.099956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2024-11-21T23:19:52.100010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T23:19:52.100043Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2024-11-21T23:19:52.100068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 135 -> 240 2024-11-21T23:19:52.100979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:52.101060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:52.101089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:19:52.101114Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2024-11-21T23:19:52.101155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:19:52.101983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:52.102057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2024-11-21T23:19:52.102081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2024-11-21T23:19:52.102113Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2024-11-21T23:19:52.102153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2024-11-21T23:19:52.102221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2024-11-21T23:19:52.108934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2024-11-21T23:19:52.109015Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 112:0 ProgressState 2024-11-21T23:19:52.109096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2024-11-21T23:19:52.109134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T23:19:52.109173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2024-11-21T23:19:52.109209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2024-11-21T23:19:52.109245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2024-11-21T23:19:52.109271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2024-11-21T23:19:52.109344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2024-11-21T23:19:52.109805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:19:52.109842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T23:19:52.109892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2024-11-21T23:19:52.110286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:19:52.110333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2024-11-21T23:19:52.110382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:19:52.110820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T23:19:52.111727Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2024-11-21T23:19:52.114101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:19:52.114161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2024-11-21T23:19:52.114521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2024-11-21T23:19:52.114566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2024-11-21T23:19:52.115061Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2024-11-21T23:19:52.115152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2024-11-21T23:19:52.115181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:660:2652] TestWaitNotification: OK eventTxId 112 >> KqpJoin::ExclusionJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2024-11-21T23:18:41.835888Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1732231121835854 2024-11-21T23:18:42.229420Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876018621681168:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:42.230584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:42.264284Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876021972698084:2206];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:42.446166Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:42.463355Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00130d/r3tmp/tmpTaLk2o/pdisk_1.dat 2024-11-21T23:18:42.517002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:42.684658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:42.684765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:42.687036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:42.687131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:42.691926Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:42.692064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2024-11-21T23:18:42.693973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:42.749592Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17192, node 1 2024-11-21T23:18:42.759586Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:42.759822Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:42.833092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/00130d/r3tmp/yandexagAHBc.tmp 2024-11-21T23:18:42.833122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/00130d/r3tmp/yandexagAHBc.tmp 2024-11-21T23:18:42.833302Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/00130d/r3tmp/yandexagAHBc.tmp 2024-11-21T23:18:42.833432Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:42.885633Z INFO: TTestServer started on Port 10563 GrpcPort 17192 TClient is connected to server localhost:10563 PQClient connected to localhost:17192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:43.230063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:18:45.684556Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876034857600101:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.684638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876034857600124:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.684729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:45.691393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:18:45.720324Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876034857600130:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:18:45.921677Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876031506584059:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:45.921933Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWQyYjZjZi1mZWRjZTg0Yi04Nzk0MDJlOC1hYWVhOTI3MQ==, ActorId: [1:7439876031506583998:2302], ActorState: ExecuteState, TraceId: 01jd8gapp5cgs4dj9hpy16p33k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:45.923124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:18:45.923614Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876034857600173:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:45.924229Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:45.925339Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWJiMWM1YmUtNDk1OWJiNjEtYzg1MjNhMzUtNzdjN2RhYTc=, ActorId: [2:7439876034857600099:2280], ActorState: ExecuteState, TraceId: 01jd8gapnh0cajsv8w6pt1jkpm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:45.925728Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:46.053604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:46.222918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:17192", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:18:46.690862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8gaqarea95wbwr25dts91f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA2Y2IxNDItMTlhNzQ3YWQtMjlkYWYzMGItNDU5MDdmZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7439876035801551790:2969] 2024-11-21T23:18:47.229695Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876018621681168:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:47.229776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:47.263407Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876021972698084:2206];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:47.263491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:18:52.474986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:17192 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2024-11-21T23:18:52.544721Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:17192 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } ... ionLifetimeMs: 3267 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:49.879090Z :INFO: [/Root] [/Root] [8f887792-4dbdbf6c-f4f2d553-ae95ce44] Closing read session. Close timeout: 18446744073709.551615s 2024-11-21T23:19:49.879132Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2024-11-21T23:19:49.879160Z :INFO: [/Root] [/Root] [8f887792-4dbdbf6c-f4f2d553-ae95ce44] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3267 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:49.879429Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0] Write session: close. Timeout = 0 ms 2024-11-21T23:19:49.879464Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0] Write session will now close 2024-11-21T23:19:49.879497Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0] Write session: aborting 2024-11-21T23:19:49.879856Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0] Write session: gracefully shut down, all writes complete 2024-11-21T23:19:49.879895Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0] Write session: destroy 2024-11-21T23:19:49.881176Z :INFO: [/Root] [/Root] [e2ca07e-e7c02f32-1c075c16-27c6f7a3] Closing read session. 
Close timeout: 0.000000s 2024-11-21T23:19:49.881230Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:19:49.881269Z :INFO: [/Root] [/Root] [e2ca07e-e7c02f32-1c075c16-27c6f7a3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3307 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:49.881300Z :INFO: [/Root] [/Root] [d87a711-42cf6d80-9ecd47f-7c9a09a3] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:49.881321Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:19:49.881345Z :INFO: [/Root] [/Root] [d87a711-42cf6d80-9ecd47f-7c9a09a3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3270 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:49.881366Z :INFO: [/Root] [/Root] [8f887792-4dbdbf6c-f4f2d553-ae95ce44] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:49.881393Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2024-11-21T23:19:49.881416Z :INFO: [/Root] [/Root] [8f887792-4dbdbf6c-f4f2d553-ae95ce44] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3269 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:49.881454Z :INFO: [/Root] [/Root] [8f887792-4dbdbf6c-f4f2d553-ae95ce44] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:49.881482Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2024-11-21T23:19:49.881518Z :INFO: [/Root] [/Root] [8f887792-4dbdbf6c-f4f2d553-ae95ce44] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3269 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:49.881615Z :NOTICE: [/Root] [/Root] [8f887792-4dbdbf6c-f4f2d553-ae95ce44] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:19:49.882574Z :INFO: [/Root] [/Root] [d87a711-42cf6d80-9ecd47f-7c9a09a3] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:49.882609Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:19:49.882659Z :INFO: [/Root] [/Root] [d87a711-42cf6d80-9ecd47f-7c9a09a3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3271 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:49.882603Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_12192021410042771069_v1 grpc read done: success# 0, data# { } 2024-11-21T23:19:49.882716Z :NOTICE: [/Root] [/Root] [d87a711-42cf6d80-9ecd47f-7c9a09a3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:19:49.882664Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12192021410042771069_v1 grpc read failed 2024-11-21T23:19:49.882695Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12192021410042771069_v1 grpc closed 2024-11-21T23:19:49.882717Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_12192021410042771069_v1 is DEAD 2024-11-21T23:19:49.883206Z :INFO: [/Root] [/Root] [e2ca07e-e7c02f32-1c075c16-27c6f7a3] Closing read session. Close timeout: 0.000000s 2024-11-21T23:19:49.883236Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:19:49.883263Z :INFO: [/Root] [/Root] [e2ca07e-e7c02f32-1c075c16-27c6f7a3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 3309 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:19:49.883313Z :NOTICE: [/Root] [/Root] [e2ca07e-e7c02f32-1c075c16-27c6f7a3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:19:49.883882Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_7885547206943714320_v1 grpc read done: success# 0, data# { } 2024-11-21T23:19:49.883909Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_7885547206943714320_v1 grpc read failed 2024-11-21T23:19:49.883928Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_7885547206943714320_v1 grpc closed 2024-11-21T23:19:49.883945Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_7885547206943714320_v1 is DEAD 2024-11-21T23:19:49.884373Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_13909994875421333241_v1 grpc read done: success# 0, data# { } 2024-11-21T23:19:49.884394Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_13909994875421333241_v1 grpc read failed 2024-11-21T23:19:49.884411Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_13909994875421333241_v1 grpc closed 2024-11-21T23:19:49.884442Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_13909994875421333241_v1 is DEAD 2024-11-21T23:19:49.884480Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0 grpc read done: success: 0 data: 2024-11-21T23:19:49.884509Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0 grpc read failed 2024-11-21T23:19:49.884530Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0 grpc closed 2024-11-21T23:19:49.884544Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|74be2e84-1816a0ff-bbb80c32-ab8d78d2_0 is DEAD 2024-11-21T23:19:49.885065Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:19:49.885290Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876294644224542:2501] disconnected; active server actors: 1 2024-11-21T23:19:49.885325Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876294644224542:2501] client user disconnected session shared/user_3_3_12192021410042771069_v1 2024-11-21T23:19:49.886769Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:49.894256Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_13909994875421333241_v1 2024-11-21T23:19:49.894162Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2024-11-21T23:19:49.894315Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876294644224554:2511] destroyed 2024-11-21T23:19:49.894407Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_13909994875421333241_v1 2024-11-21T23:19:49.894977Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876294644224541:2500] disconnected; active server actors: 1 2024-11-21T23:19:49.895010Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876294644224541:2500] client user disconnected session shared/user_3_2_7885547206943714320_v1 
2024-11-21T23:19:49.895050Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876294644224543:2499] disconnected; active server actors: 1 2024-11-21T23:19:49.895066Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876294644224543:2499] client user disconnected session shared/user_3_1_13909994875421333241_v1 2024-11-21T23:19:49.906827Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:19:49.906882Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876294644224587:2512] destroyed 2024-11-21T23:19:49.939438Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:19:50.274552Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876311824094061:2550], TxId: 281474976715698, task: 1, CA Id [3:7439876311824094059:2550]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T23:19:50.310279Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876311824094061:2550], TxId: 281474976715698, task: 1, CA Id [3:7439876311824094059:2550]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:50.358678Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876311824094061:2550], TxId: 281474976715698, task: 1, CA Id [3:7439876311824094059:2550]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:50.423726Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876311824094061:2550], TxId: 281474976715698, task: 1, CA Id [3:7439876311824094059:2550]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:50.504572Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876311824094061:2550], TxId: 281474976715698, task: 1, CA Id [3:7439876311824094059:2550]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:19:50.656070Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876311824094061:2550], TxId: 281474976715698, task: 1, CA Id [3:7439876311824094059:2550]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1
|87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut
|87.2%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut
|87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut
>> DataShardWrite::RejectOnChangeQueueOverflow [GOOD]
>> DataShardWrite::ImmediateAndPlannedCommittedOpsRace
>> KqpExplain::PureExpr [GOOD]
>> KqpExplain::ReadTableRanges
>> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin-ColumnStore
>> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD]
>> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink
>> KqpQuery::QueryClientTimeout [GOOD]
>> KqpQuery::QueryClientTimeoutPrecompiled
>> KqpLimits::TooBigQuery [GOOD]
>> KqpLimits::TooBigKey
>> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup
>> KqpParams::RowsList [GOOD]
>> KqpQuery::CurrentUtcTimestamp
>> KqpJoinOrder::TPCH10-StreamLookupJoin-ColumnStore
>> KqpIndexLookupJoin::RightSemi
>> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD]
>> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD]
>> DataShardWrite::UpsertBrokenLockArbiter [GOOD]
>> KqpParams::ParameterTypes [GOOD]
>> KqpParams::InvalidJson
>> KqpQuery::OltpCreateAsSelect_Simple [GOOD]
>> KqpQuery::OltpCreateAsSelect_Disable
>> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD]
>> KqpQuery::NoEvaluate
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231714.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231714.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231714.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231714.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231714.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231714.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230514.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112231714.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112231714.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230514.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112230514.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112230514.000000s;Name=;Codec=}; 2024-11-21T23:18:34.691719Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:18:34.784894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:18:34.808618Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:18:34.808717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:18:34.808993Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:18:34.816719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:18:34.816957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:18:34.817169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:18:34.817274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:18:34.817370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:18:34.817481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:18:34.817596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:18:34.817980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:18:34.818108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:18:34.818206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:18:34.818303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:18:34.818417Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:18:34.835777Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:18:34.835832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:18:34.840208Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:18:34.840437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:18:34.840474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:18:34.840599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:34.840719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:18:34.840769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:18:34.840795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:18:34.840849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:18:34.840889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:18:34.840928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:18:34.840957Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:18:34.841078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:34.841117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:18:34.841141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:18:34.841167Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:18:34.841232Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:18:34.841267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:18:34.841296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:18:34.841313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:18:34.841358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:18:34.841379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:18:34.841396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:18:34.841427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:18:34.841451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:18:34.841469Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:18:34.841582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2024-11-21T23:18:34.841650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2024-11-21T23:18:34.841711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2024-11-21T23:18:34.841768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2024-11-21T23:18:34.841885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:18:34.841924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:18:34.841944Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Ex ... 
me=69; 2024-11-21T23:19:55.619973Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:19:55.620021Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=116; 2024-11-21T23:19:55.620114Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=52; 2024-11-21T23:19:55.620193Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2024-11-21T23:19:55.620430Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=196; 2024-11-21T23:19:55.621011Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=536; 2024-11-21T23:19:55.621109Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=49; 2024-11-21T23:19:55.621194Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=45; 2024-11-21T23:19:55.621238Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2024-11-21T23:19:55.621283Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2024-11-21T23:19:55.621326Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2024-11-21T23:19:55.621398Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2024-11-21T23:19:55.621444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2024-11-21T23:19:55.621536Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2024-11-21T23:19:55.621580Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2024-11-21T23:19:55.621644Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2024-11-21T23:19:55.621679Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=16463; 2024-11-21T23:19:55.621837Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:19:55.621952Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:19:55.621997Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard_impl.cpp:1558;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T23:19:55.622044Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:19:55.622165Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T23:19:55.622269Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T23:19:55.622374Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:19:55.622441Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:19:55.622541Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:19:55.622590Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:19:55.622659Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:19:55.622743Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T23:19:55.622818Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:19:55.622865Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:19:55.622920Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:19:55.622965Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:19:55.623016Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:19:55.623109Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:19:55.624033Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:19:55.624191Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1575:3444];tablet_id=9437184;parent=[1:1534:3410];fline=manager.h:99;event=ask_data;request=request_id=139;1={portions_count=21};; 2024-11-21T23:19:55.624982Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:19:55.629232Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:19:55.629284Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T23:19:55.629315Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:19:55.629365Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:19:55.629445Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:19:55.629537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T23:19:55.629609Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:19:55.629658Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:19:55.629710Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:19:55.629748Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:19:55.629789Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:19:55.629891Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:19:55.630299Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T23:19:55.631472Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T23:19:55.633145Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:19:55.633197Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0
>> DataShardWrite::CancelImmediate [GOOD]
>> DataShardWrite::DeletePrepared+Volatile
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] Test command err:
2024-11-21T23:19:24.679792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:19:24.708977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:24.709241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000d40/r3tmp/tmplQTS5b/pdisk_1.dat 2024-11-21T23:19:26.851640Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.193497s 2024-11-21T23:19:26.851746Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.193677s 2024-11-21T23:19:26.976342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.180338Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:27.251855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:27.261806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:27.314882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:27.600446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.749562Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:19:27.750810Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:19:27.751364Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:19:27.766520Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:19:27.912785Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:19:27.913713Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:19:27.913860Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:19:27.915649Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:19:27.915721Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:19:27.920537Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:19:27.946439Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:19:28.057504Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:19:28.057757Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:19:28.057890Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:19:28.057931Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:19:28.057999Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:19:28.058052Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:28.074833Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.074946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.075816Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:19:28.075985Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:19:28.076208Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.076282Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.076336Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:19:28.076420Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:19:28.076496Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:19:28.076561Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:19:28.076664Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:19:28.076713Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:19:28.076752Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:19:28.076814Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:19:28.076993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:19:28.077048Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:19:28.077237Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:19:28.077522Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:19:28.077610Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:19:28.077753Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:19:28.077831Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:19:28.077878Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:19:28.077921Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2024-11-21T23:19:28.078005Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.094526Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:19:28.094637Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:19:28.094683Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:19:28.094728Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.094830Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:19:28.094871Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:19:28.094919Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:19:28.094956Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.095010Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:19:28.096799Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:19:28.096867Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:19:28.111251Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:19:28.111345Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.111384Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.111496Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:19:28.111620Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:19:28.321066Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.321139Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.321185Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:19:28.321313Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:19:28.321356Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:19:28.321501Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.321556Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:19:28.321612Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:19:28.321672Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2024-11-21T23:19:28.340297Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:19:28.340407Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:28.341038Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.341086Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.341140Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-2 ... 6Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2024-11-21T23:19:55.344141Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2024-11-21T23:19:55.344185Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T23:19:55.344215Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2024-11-21T23:19:55.344247Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:19:55.344289Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:19:55.344378Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2024-11-21T23:19:55.344418Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2024-11-21T23:19:55.344465Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2024-11-21T23:19:55.344510Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T23:19:55.344541Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:19:55.344566Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2024-11-21T23:19:55.344591Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2024-11-21T23:19:55.344640Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T23:19:55.344663Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2024-11-21T23:19:55.344687Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2024-11-21T23:19:55.344712Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit StoreAndSendWriteOutRS 2024-11-21T23:19:55.344739Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is 
Executed 2024-11-21T23:19:55.344773Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2024-11-21T23:19:55.344810Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2024-11-21T23:19:55.344832Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2024-11-21T23:19:55.344858Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T23:19:55.344879Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2024-11-21T23:19:55.344900Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T23:19:55.344921Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T23:19:55.344944Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T23:19:55.344966Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2024-11-21T23:19:55.344988Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2024-11-21T23:19:55.345009Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2024-11-21T23:19:55.345042Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2024-11-21T23:19:55.345200Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2024-11-21T23:19:55.345251Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T23:19:55.345328Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:19:55.345367Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2024-11-21T23:19:55.345410Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2024-11-21T23:19:55.345450Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2024-11-21T23:19:55.345695Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2024-11-21T23:19:55.345728Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2024-11-21T23:19:55.345769Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:19:55.345809Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:19:55.345845Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2024-11-21T23:19:55.345869Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:19:55.345901Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2024-11-21T23:19:55.345949Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:19:55.345990Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:19:55.346030Z 
node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:19:55.346068Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:19:55.359281Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 100} 2024-11-21T23:19:55.359381Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T23:19:55.359475Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:19:55.359533Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2024-11-21T23:19:55.359625Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:557:2484] 2024-11-21T23:19:55.359692Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:55.361151Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:729:2601], Recipient [6:631:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.361220Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.361280Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:728:2600], serverId# [6:729:2601], sessionId# [0:0:0] 2024-11-21T23:19:55.361451Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:727:2599], Recipient [6:631:2536]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T23:19:55.365149Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:732:2604], Recipient [6:631:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.365229Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.365285Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:731:2603], serverId# [6:732:2604], sessionId# [0:0:0] 2024-11-21T23:19:55.365599Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:730:2602], Recipient [6:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T23:19:55.365749Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:19:55.365815Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T23:19:55.365861Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2024-11-21T23:19:55.365949Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2024-11-21T23:19:55.366063Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T23:19:55.366112Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:19:55.366154Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:19:55.366198Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2024-11-21T23:19:55.366257Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2024-11-21T23:19:55.366299Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T23:19:55.366331Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:19:55.366354Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:19:55.366378Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:19:55.366513Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T23:19:55.366798Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:730:2602], 1000} after executionsCount# 1 2024-11-21T23:19:55.366866Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:730:2602], 1000} sends rowCount# 3, bytes# 72, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551543, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:19:55.366952Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:730:2602], 1000} finished in read 2024-11-21T23:19:55.367024Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T23:19:55.367051Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:19:55.367077Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:19:55.367104Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:19:55.367162Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2024-11-21T23:19:55.367188Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:19:55.367219Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2024-11-21T23:19:55.367278Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:19:55.367406Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err:
2024-11-21T23:19:24.682985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:19:24.704734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:24.704943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000da4/r3tmp/tmpUtqjyU/pdisk_1.dat 2024-11-21T23:19:26.852666Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.189435s 2024-11-21T23:19:26.852733Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.189520s 2024-11-21T23:19:26.987733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.178961Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:27.253632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:27.261810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:27.316228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:27.602937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.747310Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:19:27.748378Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:19:27.748828Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:19:27.765165Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:19:27.902275Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:19:27.903189Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:19:27.903338Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:19:27.916206Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:19:27.916297Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:19:27.921130Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:19:27.940145Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:19:27.994472Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:19:27.994724Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:19:27.994862Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:19:27.994901Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:19:27.994933Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:19:27.994970Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:27.995468Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:27.995526Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:27.997380Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:19:27.997482Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:19:27.997636Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:27.997686Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.003528Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:19:28.003695Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:19:28.003753Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:19:28.003802Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:19:28.003840Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:19:28.003901Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:19:28.003941Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:19:28.004004Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:19:28.004176Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:19:28.004223Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:19:28.011675Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:19:28.026752Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:19:28.026864Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:19:28.027003Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:19:28.027059Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:19:28.027103Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:19:28.027142Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2024-11-21T23:19:28.027177Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.027537Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:19:28.027618Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:19:28.027651Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:19:28.027685Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.038807Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:19:28.038925Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:19:28.038973Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:19:28.039015Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.039075Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:19:28.040760Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:19:28.040830Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:19:28.055058Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:19:28.055128Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.055169Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.055282Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:19:28.058064Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:19:28.256732Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.256802Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.256843Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:19:28.256972Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:19:28.257000Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:19:28.257111Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.257151Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:19:28.257202Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:19:28.257255Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2024-11-21T23:19:28.270300Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:19:28.270803Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:28.271839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.271902Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.271963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-2 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T23:19:55.631056Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:891:2736], 1001} after executionsCount# 1 2024-11-21T23:19:55.631100Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:891:2736], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:19:55.631154Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:891:2736], 1001} finished in read 2024-11-21T23:19:55.631201Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T23:19:55.631231Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T23:19:55.631260Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:19:55.631297Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:19:55.631342Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2024-11-21T23:19:55.631381Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:19:55.631407Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2024-11-21T23:19:55.631446Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T23:19:55.631532Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T23:19:55.632105Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:896:2741], Recipient [6:683:2563]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.632146Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.632196Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:895:2740], serverId# [6:896:2741], sessionId# [0:0:0] 2024-11-21T23:19:55.632329Z node 6 :TX_DATASHARD TRACE: StateWork, received 
event# 269553169, Sender [6:894:2739], Recipient [6:683:2563]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T23:19:55.633086Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:899:2744], Recipient [6:683:2563]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.633159Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.633199Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:898:2743], serverId# [6:899:2744], sessionId# [0:0:0] 2024-11-21T23:19:55.633366Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:897:2742], Recipient [6:683:2563]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T23:19:55.633455Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2024-11-21T23:19:55.633494Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T23:19:55.633523Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2024-11-21T23:19:55.633564Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2024-11-21T23:19:55.633623Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2024-11-21T23:19:55.633655Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2024-11-21T23:19:55.633679Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2024-11-21T23:19:55.633718Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2024-11-21T23:19:55.633762Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2024-11-21T23:19:55.633799Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2024-11-21T23:19:55.633822Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2024-11-21T23:19:55.633841Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2024-11-21T23:19:55.633875Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2024-11-21T23:19:55.633971Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T23:19:55.634153Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:897:2742], 1002} after executionsCount# 1 2024-11-21T23:19:55.634203Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:897:2742], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:19:55.634257Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:897:2742], 1002} finished in read 2024-11-21T23:19:55.634323Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2024-11-21T23:19:55.634352Z 
node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2024-11-21T23:19:55.634378Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T23:19:55.634427Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2024-11-21T23:19:55.634471Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2024-11-21T23:19:55.634493Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T23:19:55.634516Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2024-11-21T23:19:55.634556Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2024-11-21T23:19:55.634634Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2024-11-21T23:19:55.635250Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:902:2747], Recipient [6:680:2561]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.635290Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.635337Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:901:2746], serverId# [6:902:2747], sessionId# [0:0:0] 2024-11-21T23:19:55.635453Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:900:2745], Recipient [6:680:2561]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T23:19:55.636353Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:905:2750], Recipient [6:680:2561]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.636418Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:55.636459Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:904:2749], serverId# [6:905:2750], sessionId# [0:0:0] 2024-11-21T23:19:55.636632Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:903:2748], Recipient [6:680:2561]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T23:19:55.636699Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2024-11-21T23:19:55.636752Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T23:19:55.636800Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2024-11-21T23:19:55.636850Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2024-11-21T23:19:55.636914Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2024-11-21T23:19:55.636941Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2024-11-21T23:19:55.636965Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2024-11-21T23:19:55.636997Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2024-11-21T23:19:55.637036Z node 6 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2024-11-21T23:19:55.637067Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2024-11-21T23:19:55.637089Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2024-11-21T23:19:55.637109Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2024-11-21T23:19:55.637131Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2024-11-21T23:19:55.637238Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T23:19:55.637421Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:903:2748], 1003} after executionsCount# 1 2024-11-21T23:19:55.637472Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:903:2748], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:19:55.637526Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:903:2748], 1003} finished in read 2024-11-21T23:19:55.637633Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2024-11-21T23:19:55.637689Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2024-11-21T23:19:55.637738Z node 6 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2024-11-21T23:19:55.637778Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2024-11-21T23:19:55.637823Z node 6 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2024-11-21T23:19:55.637846Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2024-11-21T23:19:55.637891Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2024-11-21T23:19:55.637954Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2024-11-21T23:19:55.638040Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> KqpExplain::FewEffects [GOOD] >> KqpExplain::FullOuterJoin >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::Pure >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> KqpFlipJoin::Inner_1 [GOOD] >> KqpFlipJoin::Inner_2 >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin-ColumnStore >> KqpJoin::IdxLookupPartialWithTempTable >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> KqpJoinOrder::TPCDS90-StreamLookupJoin-ColumnStore >> KqpJoin::ExclusionJoin [GOOD] >> KqpJoin::FullOuterJoin >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::CreateAsSelect_BadCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test 
command err: 2024-11-21T23:19:24.680527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:19:24.706840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:24.707061Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000dd4/r3tmp/tmpcpJIAf/pdisk_1.dat 2024-11-21T23:19:26.851599Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.193473s 2024-11-21T23:19:26.851710Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.193610s 2024-11-21T23:19:26.977812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.179146Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:27.251779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:27.261816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:27.315639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:27.601204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.744043Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:19:27.744998Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:19:27.745407Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:19:27.766806Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:19:27.919876Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:19:27.921942Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:19:27.922075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:19:27.923808Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:19:27.923890Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:19:27.923940Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:19:27.946635Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:19:27.982550Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:19:27.982742Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:19:27.982844Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:19:27.982882Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:19:27.982921Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:19:27.982959Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:27.983426Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:27.983486Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:27.997468Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:19:27.997568Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:19:27.997727Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:27.997779Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.002572Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:19:28.002755Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:19:28.002821Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:19:28.002866Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:19:28.002903Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:19:28.002937Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:19:28.002987Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:19:28.003051Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:19:28.003229Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:19:28.003302Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:19:28.012134Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:19:28.029024Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:19:28.029097Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:19:28.029183Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:19:28.029242Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:19:28.029282Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:19:28.029321Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2024-11-21T23:19:28.029361Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.029659Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:19:28.029705Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:19:28.029740Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:19:28.029779Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.041047Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:19:28.041116Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:19:28.041178Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:19:28.041222Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.041273Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:19:28.042789Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:19:28.042847Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:19:28.053560Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:19:28.053653Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.053699Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.053789Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:19:28.058345Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:19:28.256732Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.256803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.256842Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:19:28.256959Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:19:28.256992Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:19:28.257109Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.257149Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:19:28.257201Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:19:28.257246Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2024-11-21T23:19:28.272920Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:19:28.273010Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:28.273699Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.273753Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.273800Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-2 ... 594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T23:19:59.149570Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:969:2792], 1001} after executionsCount# 1 2024-11-21T23:19:59.149606Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:969:2792], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:19:59.149660Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:969:2792], 1001} finished in read 2024-11-21T23:19:59.149697Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2024-11-21T23:19:59.149714Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2024-11-21T23:19:59.149728Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2024-11-21T23:19:59.149744Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2024-11-21T23:19:59.149775Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2024-11-21T23:19:59.149792Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2024-11-21T23:19:59.149809Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2024-11-21T23:19:59.149830Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2024-11-21T23:19:59.149885Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2024-11-21T23:19:59.150256Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:974:2797], Recipient [6:683:2563]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:59.150282Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:59.150306Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:973:2796], serverId# [6:974:2797], sessionId# [0:0:0] 2024-11-21T23:19:59.150351Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 
269553169, Sender [6:972:2795], Recipient [6:683:2563]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T23:19:59.151020Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:977:2800], Recipient [6:683:2563]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:59.151071Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:59.151116Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:976:2799], serverId# [6:977:2800], sessionId# [0:0:0] 2024-11-21T23:19:59.151276Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:975:2798], Recipient [6:683:2563]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T23:19:59.151341Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2024-11-21T23:19:59.151376Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T23:19:59.151407Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2024-11-21T23:19:59.151454Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2024-11-21T23:19:59.151512Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2024-11-21T23:19:59.151542Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2024-11-21T23:19:59.151567Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2024-11-21T23:19:59.151607Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2024-11-21T23:19:59.151649Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2024-11-21T23:19:59.151681Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2024-11-21T23:19:59.151728Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2024-11-21T23:19:59.151778Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2024-11-21T23:19:59.151804Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2024-11-21T23:19:59.151881Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T23:19:59.152043Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:975:2798], 1002} after executionsCount# 1 2024-11-21T23:19:59.152087Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:975:2798], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:19:59.152136Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:975:2798], 1002} finished in read 2024-11-21T23:19:59.152179Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2024-11-21T23:19:59.152200Z node 6 
:TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2024-11-21T23:19:59.152223Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2024-11-21T23:19:59.152247Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2024-11-21T23:19:59.152289Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2024-11-21T23:19:59.152311Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2024-11-21T23:19:59.152336Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2024-11-21T23:19:59.152364Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2024-11-21T23:19:59.152436Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2024-11-21T23:19:59.152908Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:980:2803], Recipient [6:680:2561]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:59.152945Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:59.152992Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:979:2802], serverId# [6:980:2803], sessionId# [0:0:0] 2024-11-21T23:19:59.153093Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:978:2801], Recipient [6:680:2561]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T23:19:59.153811Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:983:2806], Recipient [6:680:2561]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:59.153854Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:59.153892Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:982:2805], serverId# [6:983:2806], sessionId# [0:0:0] 2024-11-21T23:19:59.154043Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:981:2804], Recipient [6:680:2561]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T23:19:59.154121Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2024-11-21T23:19:59.154170Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2024-11-21T23:19:59.154202Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2024-11-21T23:19:59.154251Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2024-11-21T23:19:59.154322Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2024-11-21T23:19:59.154353Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2024-11-21T23:19:59.154425Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2024-11-21T23:19:59.154453Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2024-11-21T23:19:59.154496Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2024-11-21T23:19:59.154529Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2024-11-21T23:19:59.154557Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2024-11-21T23:19:59.154584Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2024-11-21T23:19:59.154608Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2024-11-21T23:19:59.154702Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T23:19:59.154841Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:981:2804], 1003} after executionsCount# 1 2024-11-21T23:19:59.154894Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:981:2804], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:19:59.154954Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:981:2804], 1003} finished in read 2024-11-21T23:19:59.155006Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2024-11-21T23:19:59.155033Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2024-11-21T23:19:59.155059Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2024-11-21T23:19:59.155100Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2024-11-21T23:19:59.155155Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2024-11-21T23:19:59.155179Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2024-11-21T23:19:59.155205Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2024-11-21T23:19:59.155237Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2024-11-21T23:19:59.155312Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> TColumnShardTestSchema::InternalTTL_Types [GOOD] >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> KqpLimits::TooBigKey [GOOD] >> KqpLimits::TooBigColumn >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst+StreamLookupJoin-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL_Types [GOOD] Test command err: 2024-11-21T23:19:31.678421Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 
2024-11-21T23:19:31.775671Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:19:31.780704Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:19:31.781128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:19:31.804266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:19:31.804380Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:19:31.804640Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:19:31.813023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:31.813255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:31.813482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:31.813589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:31.813719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:19:31.813811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:19:31.813942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:19:31.814040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:19:31.814141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:19:31.814235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:19:31.814351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:19:31.814595Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:19:31.836880Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:19:31.837244Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T23:19:31.837325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:19:31.837372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:19:31.841359Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:19:31.841621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:19:31.841667Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:19:31.841884Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:31.842095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:31.842176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:31.842238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:19:31.842350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:19:31.842439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:31.842484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:31.842521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:19:31.842698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:31.842768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:31.843020Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:31.843092Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:19:31.843204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:19:31.843276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:31.843337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:31.843394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:19:31.843473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:31.843512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:31.843577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:19:31.843629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:31.843670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:31.843697Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:19:31.843959Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=119; 2024-11-21T23:19:31.844052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2024-11-21T23:19:31.844140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2024-11-21T23:19:31.844233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2024-11-21T23:19:31.844414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:31.844468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:31.844501Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:19:31.844721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:19:31.844772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:19:31.844799Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:19:31.844962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:19:31.845023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; ... ks=0);; 2024-11-21T23:20:01.950550Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T23:20:01.950598Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T23:20:01.950640Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=16; 2024-11-21T23:20:01.950684Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=16; 2024-11-21T23:20:01.950732Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T23:20:01.950843Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:20:01.950882Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T23:20:01.950925Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:20:01.951080Z node 4 :TX_COLUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [4:275:2287], Recipient [4:106:2138]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2024-11-21T23:20:01.951113Z node 4 
:TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2024-11-21T23:20:01.951438Z node 4 :TX_COLUMNSHARD DEBUG: TxWriteIndex[25] (CS::INDEXATION) apply at tablet 9437184 2024-11-21T23:20:01.954599Z node 4 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 3 2024-11-21T23:20:01.954745Z node 4 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9781736;raw_bytes=15906843; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:20:01.954791Z node 4 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[4:106:2138];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=218e6fba-a85f11ef-b523f12f-3bddf8d2;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:20:01.955308Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:20:01.955460Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:20:01.955505Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:20:01.955643Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=80000; 2024-11-21T23:20:01.955707Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=saved_at; 2024-11-21T23:20:01.956543Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: uint64 2024-11-21T23:20:01.956695Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:20:01.956819Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:20:01.956913Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:20:01.957062Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:20:01.957159Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:20:01.957268Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:20:01.957322Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] finished for tablet 9437184 2024-11-21T23:20:01.957399Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:277:2289] send ScanData to [4:272:2284] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:20:01.957833Z node 4 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [4:277:2289] and sent to [4:272:2284] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.079},{"events":["f_ack"],"t":0.083},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.085}],"full":{"a":1732231201871941,"name":"_full_task","f":1732231201871941,"d_finished":0,"c":0,"l":1732231201957444,"d":85503},"events":[{"name":"bootstrap","f":1732231201872517,"d_finished":3345,"c":1,"l":1732231201875862,"d":3345},{"a":1732231201957040,"name":"ack","f":1732231201955274,"d_finished":1663,"c":1,"l":1732231201956937,"d":2067},{"a":1732231201957026,"name":"processing","f":1732231201876222,"d_finished":3820,"c":5,"l":1732231201956940,"d":4238},{"name":"ProduceResults","f":1732231201873747,"d_finished":2676,"c":8,"l":1732231201957296,"d":2676},{"a":1732231201957299,"name":"Finish","f":1732231201957299,"d_finished":0,"c":0,"l":1732231201957444,"d":145},{"name":"task_result","f":1732231201876240,"d_finished":2054,"c":4,"l":1732231201950983,"d":2054}],"id":"9437184::12"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T23:20:01.957905Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:20:01.870900Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T23:20:01.957949Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:20:01.958060Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.074719s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.069270s;size=0.0063152;details={columns=9;};};]};; 2024-11-21T23:20:01.958137Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:277:2289];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 25897, MsgBus: 5255 2024-11-21T23:19:46.021223Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876294807439405:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:46.021271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002862/r3tmp/tmpZ73l5S/pdisk_1.dat 2024-11-21T23:19:46.501831Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:46.510291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:46.510538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:46.514291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25897, node 1 2024-11-21T23:19:46.659223Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:46.659251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:46.659268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:46.659399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5255 TClient is connected to server localhost:5255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:47.338182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.373408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.537754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.701642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:47.777577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:49.675057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876307692342980:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:49.675205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:49.925050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:49.996710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.022533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.066625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.121021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.206546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.256032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876311987310783:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.256143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.256241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876311987310788:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.260169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:50.273794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876311987310790:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:51.062967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876294807439405:2055];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:51.063266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:51.397729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20589, MsgBus: 11497 2024-11-21T23:19:52.342141Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876320715586395:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002862/r3tmp/tmpsCaHrA/pdisk_1.dat 2024-11-21T23:19:52.404073Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:52.457610Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:52.468525Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:52.468603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:52.469888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20589, node 2 2024-11-21T23:19:52.533680Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:52.533704Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:52.533712Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:52.533817Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11497 TClient is connected to server localhost:11497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:53.036613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:53.051386Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:55.416239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876333600488706:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:55.416407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:55.416916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876333600488742:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:55.420929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:19:55.439006Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876333600488745:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:19:55.540956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:19:55.792653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9424, MsgBus: 25198 2024-11-21T23:19:57.175209Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876343443040663:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:57.175257Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002862/r3tmp/tmpcuNryh/pdisk_1.dat 2024-11-21T23:19:57.473583Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:57.497125Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:57.497213Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:57.500376Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9424, node 3 2024-11-21T23:19:57.628205Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:57.628235Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:57.628242Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:57.628352Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25198 TClient is connected to server localhost:25198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:58.121841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:58.129879Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:00.702174Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876356327942932:2300], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:00.702274Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:00.706788Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876356327942968:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:00.712072Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:20:00.729745Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876356327942970:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:20:00.834564Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.186095Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876360622910451:2327], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2024-11-21T23:20:01.187767Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2UxNWNhNmEtMTQyM2QwNmUtOGEzYjE3YzQtMmZhMjVjOGQ=, ActorId: [3:7439876360622910449:2326], ActorState: ExecuteState, TraceId: 01jd8gd0bx4j4wm8fr3mth2fgr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28345, MsgBus: 17963 2024-11-21T23:19:40.176998Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876270239651728:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:40.177260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002894/r3tmp/tmpUlVNwU/pdisk_1.dat 2024-11-21T23:19:40.852108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:40.852193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:40.853882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:40.876025Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28345, node 1 2024-11-21T23:19:41.021437Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:41.021459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:41.021468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:41.021566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17963 TClient is connected to server localhost:17963 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:41.915378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:41.931283Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:41.950533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:42.181982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:42.374332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:42.476230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:44.413880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876287419522468:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.425320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.456222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.504472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.543812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.575338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.608164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.666517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.743507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876287419522966:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.743608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.743902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876287419522971:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.746980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:44.757896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876287419522973:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:45.166563Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876270239651728:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:45.166625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:45.943840Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876291714490594:2465], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2024-11-21T23:19:45.945443Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDhmOTgxZWUtYzgyNTRiMjktNGU4MGYzNC03MTJlNDY5NA==, ActorId: [1:7439876291714490586:2460], ActorState: ExecuteState, TraceId: 01jd8gchfv40r13nk2d337d37r, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 24710, MsgBus: 9555 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002894/r3tmp/tmpgmbNMN/pdisk_1.dat 2024-11-21T23:19:47.037625Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:47.150056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:47.158591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:47.159040Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:47.178263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24710, node 2 2024-11-21T23:19:47.322936Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:47.322960Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:47.322967Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:47.323063Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9555 TClient is connected to server localhost:9555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:47.907774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.914917Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:50.664568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876312684697196:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.664636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.692841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.926387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:19:51.155693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876316979665793:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:51.155811Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:51.156061Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876316979665798:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:51.160799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T23:19:51.170412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876316979665800:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } Trying to start YDB, gRPC: 3538, MsgBus: 28527 2024-11-21T23:19:54.209836Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876330846644261:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002894/r3tmp/tmpI1z8sK/pdisk_1.dat 2024-11-21T23:19:54.328223Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:54.421476Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:54.423672Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:54.423756Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:54.427513Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3538, node 3 2024-11-21T23:19:54.507036Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:54.507084Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:54.507093Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:54.507205Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28527 TClient is connected to server localhost:28527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:55.269344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:58.280813Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876348026513927:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.281003Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.295403Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.498937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.868141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876348026515255:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.868247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.868532Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876348026515260:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.872987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2024-11-21T23:19:58.886148Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876348026515262:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2024-11-21T23:19:59.186026Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876330846644261:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:59.186806Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexLookupJoin::RightSemi [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable >> KqpParams::InvalidJson [GOOD] >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore >> KqpQuery::Pure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 15857, MsgBus: 12692 2024-11-21T23:19:43.523979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876283898608808:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:43.524020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002882/r3tmp/tmpyYRIeT/pdisk_1.dat 2024-11-21T23:19:44.109074Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:44.110952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:44.111025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:44.115502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15857, node 1 2024-11-21T23:19:44.240620Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:44.240647Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:44.240653Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:44.240746Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12692 TClient is connected to server localhost:12692 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:44.964109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:45.059824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:45.320301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:45.602507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:45.700814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.483987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876301078479501:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:47.484109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:47.710920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:47.751780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:47.785914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:47.817702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:47.857623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:47.952094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.013130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876305373447297:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.013226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.013376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876305373447302:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.017728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:48.033661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876305373447304:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:48.535318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876283898608808:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:48.535373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:49.428294Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWNlMjhkNDUtYmQzMjdhNDQtNjE3NjM0MzMtNDgzZmYwMzU=, ActorId: [1:7439876309668414916:2460], ActorState: ExecuteState, TraceId: 01jd8gcmqe94ce5zf1xqvp15fb, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1189: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Missing value for parameter: $group Trying to start YDB, gRPC: 29836, MsgBus: 63497 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002882/r3tmp/tmpDdXEnA/pdisk_1.dat 2024-11-21T23:19:50.337870Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:50.463276Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:50.498910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:50.499002Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:50.500828Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29836, node 2 2024-11-21T23:19:50.559292Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:50.559321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:50.559328Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:50.559430Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63497 TClient is connected to server localhost:63497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:51.021330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:51.030121Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:51.047169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:51.134611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:51.324784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:51.411118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.744949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876326749709276:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:53.745020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:53.790762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:53.839978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:53.872705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:53.902719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:53.947957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:54.033681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:54.104105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876331044677072:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:54.104196Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:54.104606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876331044677077:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:54.108711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:54.120171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876331044677079:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } Trying to start YDB, gRPC: 7381, MsgBus: 19091 2024-11-21T23:19:56.989938Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876336698904601:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:56.989977Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002882/r3tmp/tmpusAsds/pdisk_1.dat 2024-11-21T23:19:57.201542Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:57.239509Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:57.239622Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:57.243553Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7381, node 3 2024-11-21T23:19:57.451003Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:57.451025Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:57.451034Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:57.451144Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19091 TClient is connected to server localhost:19091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:58.027899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:58.038351Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:58.053617Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:19:58.133414Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.310219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:58.391256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.199818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876358173742796:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.199926Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.236174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.275442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.303690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.338934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.371464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.439181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.497318Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876358173743295:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.497407Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.497741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876358173743300:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.546860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:01.556987Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876358173743302:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:02.035774Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876336698904601:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:02.035932Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:02.800974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.967255Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZjE1NWJmNTAtMzQzZTkyNTQtMjUzYjUwYzctOTk4YWRjZGI=, ActorId: [3:7439876362468710914:2460], ActorState: ExecuteState, TraceId: 01jd8gd215bgsfw477r1z9e5vq, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:913: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:913: Invalid Json value >> KqpJoinOrder::TPCDS94-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::NoEvaluate [GOOD] Test command err: Trying to start YDB, gRPC: 17199, MsgBus: 14537 2024-11-21T23:19:41.603619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876272160984806:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:41.603754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00288c/r3tmp/tmp6BnKjB/pdisk_1.dat 2024-11-21T23:19:42.130921Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:42.139295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:42.139592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:42.165139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17199, node 1 2024-11-21T23:19:42.315867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:42.315898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:42.315905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:42.316030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14537 TClient is connected to server localhost:14537 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:42.948233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:42.984264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
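The "Invalid Json value" error above comes from parameter validation in the session actor: a value bound for a Json-typed parameter did not parse as JSON. A hedged YQL sketch of the query shape involved, with illustrative names only:

    -- A parameter declared as Json must be bound to well-formed JSON text.
    -- Binding something malformed such as '{broken' on the client side is the
    -- kind of input that yields "Invalid Json value" at request validation.
    DECLARE $doc AS Json;

    SELECT $doc AS payload;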
2024-11-21T23:19:43.166939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:19:43.383471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:43.467177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:45.463415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876289340855542:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:45.463531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:45.756724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:45.786323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:45.826673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:45.879203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:45.918644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:45.966769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:46.052156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876293635823335:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:46.052240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:46.052583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876293635823340:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:46.057399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:46.073874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876293635823342:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:46.607039Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876272160984806:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:46.653649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9540, MsgBus: 13297 2024-11-21T23:19:48.561431Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876304893612616:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:48.561575Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00288c/r3tmp/tmpQhsPVX/pdisk_1.dat 2024-11-21T23:19:48.747272Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9540, node 2 2024-11-21T23:19:48.870678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:48.870791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:48.899801Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:48.975011Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:48.975047Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:48.975056Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:48.975152Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13297 TClient is connected to server localhost:13297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:49.571623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:49.579289Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:49.589033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:49.677202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:49.922728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:50.010201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:52.435403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876322073483509:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ha ... at schemeshard: 72057594046644480 2024-11-21T23:19:52.711951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:52.762181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:52.820279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876322073484012:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:52.820346Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:52.820546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876322073484017:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:52.823821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:52.837540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876322073484019:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:53.561825Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876304893612616:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:53.561892Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:53.929027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 7028, MsgBus: 12660 2024-11-21T23:19:57.099077Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876343833883761:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:57.099145Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00288c/r3tmp/tmp7EcpkU/pdisk_1.dat 2024-11-21T23:19:57.342070Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:57.350890Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:57.350992Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:57.354001Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7028, node 3 2024-11-21T23:19:57.466830Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:57.466854Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:57.466863Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:57.466969Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12660 TClient is connected to server localhost:12660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:58.093010Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:58.104618Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:58.118708Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.242677Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:58.440751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:58.586904Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:00.932147Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876356718787347:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:00.932233Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.009310Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.076620Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.154043Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.220443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.263907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.339348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.434806Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876361013755148:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.434910Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.435419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876361013755153:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:01.440281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:01.466957Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876361013755155:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:02.099473Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876343833883761:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:02.099551Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:02.981551Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876365308722774:2466], status: UNSUPPORTED, issues:
: Error: Default error
:7:24: Error: EVALUATE IF is not supported in YDB queries., code: 2030 2024-11-21T23:20:02.982719Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzZkNTU4NzEtNGI2MGViNGItODNhNDQ5OWQtZmRkOTJkZTQ=, ActorId: [3:7439876365308722764:2460], ActorState: ExecuteState, TraceId: 01jd8gd2344k9ddmz01m72sc0a, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: 2024-11-21T23:20:03.009041Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876365308722778:2468], status: UNSUPPORTED, issues:
: Error: Default error
:4:28: Error: EVALUATE is not supported in YDB queries., code: 2030 2024-11-21T23:20:03.010815Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzZkNTU4NzEtNGI2MGViNGItODNhNDQ5OWQtZmRkOTJkZTQ=, ActorId: [3:7439876365308722764:2460], ActorState: ExecuteState, TraceId: 01jd8gd25g0njg6g3ddsvbkjn3, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: 2024-11-21T23:20:03.125954Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876369603690103:2473], status: UNSUPPORTED, issues:
: Error: Default error
:8:78: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2024-11-21T23:20:03.128244Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzZkNTU4NzEtNGI2MGViNGItODNhNDQ5OWQtZmRkOTJkZTQ=, ActorId: [3:7439876365308722764:2460], ActorState: ExecuteState, TraceId: 01jd8gd2988v2kbmws0vy4vndd, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull >> KqpLimits::CancelAfterRwTx [GOOD] >> KqpExplain::FullOuterJoin [GOOD] >> KqpJoinOrder::TPCDS9_SMALL+StreamLookupJoin-ColumnStore >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 32122, MsgBus: 1183 2024-11-21T23:19:45.833938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876288813045640:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:45.843359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002869/r3tmp/tmpCXOeAy/pdisk_1.dat 2024-11-21T23:19:46.463594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:46.463709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:46.469782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:46.508222Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32122, node 1 2024-11-21T23:19:46.747046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:46.747079Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:46.747092Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:46.747220Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1183 TClient is connected to server localhost:1183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
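The three UNSUPPORTED compile errors above (EVALUATE IF, EVALUATE, and ATOM evaluation, all code 2030) all concern YQL's compile-time evaluation constructs, which the KQP compiler rejects in YDB queries. A rough sketch of the kind of statement that triggers the first of them, written to general YQL conventions rather than copied from the test:

    -- Compile-time branching via EVALUATE IF is not supported by KQP and is
    -- rejected with code 2030, as in the log records above.
    DEFINE ACTION $log_it() AS
        SELECT "evaluated at compile time";
    END DEFINE;

    EVALUATE IF true
        DO $log_it();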
2024-11-21T23:19:47.368315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.391739Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:47.412559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.552161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.719226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.806090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:49.801795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876305992916554:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:49.802095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.057899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.100472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.132757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.170256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.212076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.296913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.342288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876310287884352:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.342374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.342569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876310287884357:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.346284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:50.359802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876310287884359:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:50.835494Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876288813045640:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:50.835623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24378, MsgBus: 21262 2024-11-21T23:19:52.299705Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876320637822313:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:52.299738Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002869/r3tmp/tmprkUt58/pdisk_1.dat 2024-11-21T23:19:52.419250Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:52.439499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:52.439609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:52.441011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24378, node 2 2024-11-21T23:19:52.533710Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:52.533740Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:52.533747Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:52.533855Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21262 TClient is connected to server localhost:21262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:53.066684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:53.077293Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:53.082268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.194340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.374953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.474783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:55.651540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876333522725758:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:55.651644Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:55.700765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:55.748762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:55.811782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:55.880767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:55.956966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.027927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.126299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876337817693553:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.126426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.131806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876337817693558:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.137939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:56.164853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876337817693560:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:57.304123Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876320637822313:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:57.304184Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:57.629670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2075, MsgBus: 26700 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002869/r3tmp/tmpHfDHFb/pdisk_1.dat 2024-11-21T23:19:58.833785Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:58.882871Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:58.904642Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 2075, node 3 2024-11-21T23:19:58.906732Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:58.908468Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:58.968916Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:58.968936Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:58.968940Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:58.969015Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26700 TClient is connected to server localhost:26700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:59.467812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:59.489233Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:59.551829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:59.726099Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:59.838653Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:02.618566Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876362464895993:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:02.618684Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:02.700632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.732773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.771685Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.847252Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.903037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.955067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:03.045529Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876366759863794:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:03.045596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876366759863799:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:03.045634Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:03.049511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:03.057691Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876366759863801:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } >> KqpFlipJoin::Inner_2 [GOOD] >> KqpFlipJoin::Inner_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 6210, MsgBus: 6580 2024-11-21T23:19:45.080307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876291007334010:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:45.080855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002871/r3tmp/tmpUSnRRN/pdisk_1.dat 2024-11-21T23:19:45.613521Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:45.618954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:45.619039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:45.623213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6210, node 1 2024-11-21T23:19:45.745589Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:45.745610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:45.745617Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:45.745694Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6580 TClient is connected to server localhost:6580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:46.294562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:46.331115Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:46.348126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:46.521124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:46.735991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:46.820055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:48.793331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876303892237614:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.793451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:49.017071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:49.041164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:49.074450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:49.112389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:49.144509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:49.229775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:49.330825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876308187205418:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:49.330907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:49.331268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876308187205423:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:49.335322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:49.363962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876308187205425:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:50.086829Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876291007334010:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:50.099011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2},{"InternalOperatorId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Limit","Limit":"1"}],"Node Type":"Aggregate-Limit"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key 
(-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 9127, MsgBus: 13419 2024-11-21T23:19:51.947918Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876316445538761:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:51.947967Z node 2 :METADATA ... Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Inputs":[],"ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_3_0","Node Type":"Precompute_3","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":8,"Plans":[{"Tables":["EightShard"],"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"Effect"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (350, +∞)"],"type":"Scan"},{"columns":["Data","Key"],"scan_by":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"type":"Scan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"},{"columns":["Data","Key"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Delete","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node 
Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":18,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":24,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} Trying to start YDB, gRPC: 17682, MsgBus: 16806 2024-11-21T23:19:58.538581Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876346758390482:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:58.539442Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002871/r3tmp/tmp0bYMTm/pdisk_1.dat 2024-11-21T23:19:58.658835Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:58.673065Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:58.673157Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:58.674934Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17682, node 3 2024-11-21T23:19:58.790968Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:58.790989Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:58.790996Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:58.791098Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16806 TClient is connected to server localhost:16806 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:19:59.367787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.375732Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:59.389674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:59.452559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:59.619065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:59.697039Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:02.117503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876363938261356:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:02.117638Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:02.164059Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.205748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.240323Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.350012Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.410530Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.474193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.590482Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876363938261861:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:02.590594Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:02.590881Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876363938261866:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:02.595428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:02.605892Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876363938261868:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:03.535171Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876346758390482:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:03.535248Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:03.893524Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:04.317270Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:04.358898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx [GOOD] Test command err: Trying to start YDB, gRPC: 4681, MsgBus: 21462 2024-11-21T23:17:42.797634Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875762999658168:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:42.797686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002938/r3tmp/tmp21qebX/pdisk_1.dat 2024-11-21T23:17:43.312705Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:43.323040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:43.323134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:43.325206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4681, node 1 2024-11-21T23:17:43.426647Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:43.426666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:43.426670Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:43.426747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21462 TClient is connected to server localhost:21462 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:44.140482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.156326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:17:44.170178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.327075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.495805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:44.581195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:46.513010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875780179529046:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:46.513155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:46.770923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:46.845847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:46.886162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:46.976365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.017142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.063521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:47.147707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875784474496848:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:47.147783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:47.148093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875784474496853:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:47.152106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:47.167962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875784474496855:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:47.798680Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875762999658168:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:47.798745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2024-11-21T23:17:48.341100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:48.702163Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710672;task_id=2;memory=8388608; 2024-11-21T23:17:48.702193Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710672, task: 2. [Mem] memory 8388608 NOT granted 2024-11-21T23:17:53.807916Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439875788769464872:2496], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd8g8yw23b55s0emhjs7wt8e. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjliNmJmNDAtMWQyMTY2ZDMtODY3MjkyZmUtZDBkNjk4MDA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-uc25mmfz34, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710672, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 8MiB, tx largest successful memory allocation: 8MiB, tx largest failed memory allocation: 8MiB, tx total execution units: 2, started at: 2024-11-21T23:17:48.699811Z } TxMaxAllocationBacktrace: 0. /-S/ydb/core/kqp/rm_service/kqp_rm_service.h:251: Allocated @ 0x3C6B3001 1. /-S/ydb/core/kqp/rm_service/kqp_rm_service.cpp:335: AllocateResources @ 0x3C6ACC6A 2. /-S/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp:36: AllocateExtraQuota @ 0x3FEE99FE 3. /-S/ydb/library/yql/dq/actors/compute/dq_compute_actor.h:296: ?? @ 0x3FEE93CE 4. /-S/ydb/library/yql/dq/actors/compute/dq_compute_memory_quota.h:144: RequestExtraMemory @ 0x3DCBEBF5 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x1D14BDAF 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x1D14BDAF 7. /-S/yql/essentials/minikql/aligned_page_pool.cpp:592: TryIncreaseLimit @ 0x1D14BDAF 8. /-S/yql/essentials/minikql/aligned_page_pool.cpp:372: GetPage @ 0x1D14BDAF 9. /-S/yql/essentials/minikql/mkql_alloc.cpp:193: MKQLAllocSlow @ 0x1D172827 10. /-S/yql/essentials/minikql/mkql_alloc.h:343: MKQLAllocFastWithSize @ 0x3C491A73 11. /-S/yql/essentials/minikql/mkql_alloc.h:462: AllocateOn, NKikimr::NMiniKQL::TMemoryUsageInfo *, const NKikimr::NMiniKQL::TType *&, TVector >, std::__y1::allocator > > >, NYql::NDq::TDqMeteringStats::TInputStatsMeter &> @ 0x3C491A73 12. /-S/yql/essentials/minikql/computation/mkql_computation_node_holders.h:841: Create, const NKikimr::NMiniKQL::TType *&, TVector >, std::__y1::allocator > > >, NYql::NDq::TDqMeteringStats::TInputStatsMeter &> @ 0x3C491A73 13. /-S/ydb/library/yql/dq/runtime/dq_input_producer.cpp:749: CreateInputUnionValue @ 0x3C491A73 14. /-S/ydb/library/yql/dq/runtime/dq_tasks_runner.cpp:150: DqBuildInputValue @ 0x3C46577E 15. /-S/ydb/library/yql/dq/runtime/dq_tasks_runner.cpp:590: Prepare @ 0x3C46C819 16. /-S/ydb/library/yql/dq/actors/compute/dq_sync_compute_actor_base.h:219: PrepareTaskRunner @ 0x3FEF9C85 17. /-S/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp:80: DoBootstrap @ 0x3FEF6390 18. /-S/ydb/library/yql/dq/actors/compute/dq_compute_actor_impl.h:149: Bootstrap @ 0x3FF247B1 19. /-S/ydb/library/actors/core/actor_bootstrapped.h:26: StateBootstrap @ 0x3FF2334E 20. /-S/ydb/library/actors/core/actor.h:533: Receive @ 0x1ACE7D86 21. /-S/ydb/library/actors/core/executor_thread.cpp:248: Execute @ 0x1ACE1B58 22. /-S/ydb/library/actors/core/executor_thread.cpp:425: operator() @ ... ELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:34.183786Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876245438576053:5377], TxId: 281474976716028, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. CustomerSuppliedId : . TraceId : 01jd8gc5k8aw507cy7ct2jvtgj. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439876245438576043:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:34.184009Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876245438576050:5374], TxId: 281474976716028, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. CustomerSuppliedId : . TraceId : 01jd8gc5k8aw507cy7ct2jvtgj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439876245438576043:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:34.184119Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439876245438576043:2459] TxId: 281474976716028. Ctx: { TraceId: 01jd8gc5k8aw507cy7ct2jvtgj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2024-11-21T23:19:34.184355Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gc5k8aw507cy7ct2jvtgj, Create QueryResponse for error on request, msg: 2024-11-21T23:19:35.487433Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gc6w4eqsb662nh2rdd3we, Create QueryResponse for error on request, msg: 2024-11-21T23:19:36.535555Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439876254028511038:2459] TxId: 281474976716038. Ctx: { TraceId: 01jd8gc7wk26c3enm7rjb3hsqw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 437ms } {
: Error: Cancelling after 448ms during execution } ] 2024-11-21T23:19:36.536085Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876254028511050:5458], TxId: 281474976716038, task: 6. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. TraceId : 01jd8gc7wk26c3enm7rjb3hsqw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439876254028511038:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:36.536690Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876254028511053:5461], TxId: 281474976716038, task: 9. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. CustomerSuppliedId : . TraceId : 01jd8gc7wk26c3enm7rjb3hsqw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439876254028511038:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:36.536975Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876254028511045:5453], TxId: 281474976716038, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd8gc7wk26c3enm7rjb3hsqw. SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439876254028511038:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:36.537215Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876254028511046:5454], TxId: 281474976716038, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. TraceId : 01jd8gc7wk26c3enm7rjb3hsqw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439876254028511038:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:36.537433Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876254028511047:5455], TxId: 281474976716038, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. TraceId : 01jd8gc7wk26c3enm7rjb3hsqw. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439876254028511038:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:36.537859Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876254028511048:5456], TxId: 281474976716038, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. CustomerSuppliedId : . TraceId : 01jd8gc7wk26c3enm7rjb3hsqw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439876254028511038:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:36.538155Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876254028511049:5457], TxId: 281474976716038, task: 5. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. TraceId : 01jd8gc7wk26c3enm7rjb3hsqw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439876254028511038:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:36.542473Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876254028511052:5460], TxId: 281474976716038, task: 8. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. TraceId : 01jd8gc7wk26c3enm7rjb3hsqw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439876254028511038:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:36.545519Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gc7wk26c3enm7rjb3hsqw, Create QueryResponse for error on request, msg: 2024-11-21T23:19:37.868185Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gc962dnw35wgg7z1st6s1, Create QueryResponse for error on request, msg: 2024-11-21T23:19:44.154932Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gcf961wkdz84yz7kj6anx, Create QueryResponse for error on request, msg: 2024-11-21T23:19:45.215124Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gcgbe70967fsdx8aprgh3, Create QueryResponse for error on request, msg: 2024-11-21T23:19:47.128148Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439876301273153096:2459] TxId: 281474976716084. Ctx: { TraceId: 01jd8gcj6ffhhtf0ab6sdfchpz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 463ms } {
: Error: Cancelling after 465ms during execution } ] 2024-11-21T23:19:47.128932Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gcj6ffhhtf0ab6sdfchpz, Create QueryResponse for error on request, msg: 2024-11-21T23:19:47.613101Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439876301273153169:2459] TxId: 281474976716086. Ctx: { TraceId: 01jd8gcjpbcr81qm584wyckgm6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 464ms } {
: Error: Cancelling after 465ms during execution } ] 2024-11-21T23:19:47.613484Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876301273153185:5822], TxId: 281474976716086, task: 9. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. TraceId : 01jd8gcjpbcr81qm584wyckgm6. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439876301273153169:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:47.639083Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876301273153179:5817], TxId: 281474976716086, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. TraceId : 01jd8gcjpbcr81qm584wyckgm6. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7439876301273153169:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:47.641345Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gcjpbcr81qm584wyckgm6, Create QueryResponse for error on request, msg: 2024-11-21T23:19:57.407480Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439876344222827853:2459] TxId: 281474976716130. Ctx: { TraceId: 01jd8gcw7pb2pydk1fkd775vb2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 486ms } {
: Error: Cancelling after 488ms during execution } ] 2024-11-21T23:19:57.407720Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7439876344222827869:6149], TxId: 281474976716130, task: 9. Ctx: { CustomerSuppliedId : . TraceId : 01jd8gcw7pb2pydk1fkd775vb2. SessionId : ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7439876344222827853:2459], status: CANCELLED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:57.407865Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439876344222827853:2459] TxId: 281474976716130. Ctx: { TraceId: 01jd8gcw7pb2pydk1fkd775vb2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2024-11-21T23:19:57.408059Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YjhkZGExNjktY2Y1ZjM4NGEtZTAwMTY0ODUtNTBmNzc2N2Q=, ActorId: [3:7439875867481439555:2459], ActorState: ExecuteState, TraceId: 01jd8gcw7pb2pydk1fkd775vb2, Create QueryResponse for error on request, msg: >> KqpJoin::FullOuterJoin [GOOD] >> KqpJoin::CrossJoinCount >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2024-11-21T23:19:24.846716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:19:24.856124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:24.856355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e1e/r3tmp/tmpAROQPQ/pdisk_1.dat 2024-11-21T23:19:26.850891Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.153033s 2024-11-21T23:19:26.851035Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.153221s 2024-11-21T23:19:26.981760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.178966Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:27.253362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:27.262421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:27.316563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:27.600468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.743735Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:19:27.744936Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:19:27.745382Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:19:27.765036Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:19:27.888708Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:19:27.897292Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:19:27.897491Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:19:27.914964Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:19:27.915066Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:19:27.920415Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:19:27.936643Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:19:27.956551Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:19:27.978906Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:19:27.979178Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:19:27.979223Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:19:27.979267Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:19:27.979330Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:27.979862Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:27.979911Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:27.996769Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:19:27.996926Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:19:27.997140Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:27.997194Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.002529Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:19:28.002715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:19:28.002764Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:19:28.002814Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:19:28.002854Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:19:28.002898Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:19:28.002933Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:19:28.003003Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:19:28.003197Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:19:28.003245Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:19:28.011513Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:19:28.028058Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:19:28.028144Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:19:28.028236Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:19:28.028283Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:19:28.028323Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:19:28.028360Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2024-11-21T23:19:28.028395Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.028683Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:19:28.028719Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:19:28.028756Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:19:28.028786Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.038851Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:19:28.038945Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:19:28.039008Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:19:28.039052Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.039109Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:19:28.040760Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:19:28.040825Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:19:28.051682Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:19:28.051777Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.051816Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.051906Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:19:28.057651Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:19:28.279617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.279674Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.279716Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:19:28.279831Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:19:28.279861Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:19:28.279987Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.280034Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:19:28.280092Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:19:28.280137Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2024-11-21T23:19:28.289470Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:19:28.289568Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:28.290192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.290241Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.290318Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-2 ... xecuting on unit WaitForPlan 2024-11-21T23:20:05.457743Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2024-11-21T23:20:05.457987Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 515 RawX2: 30064773525 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:20:05.458048Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:20:05.458827Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:770:2631], Recipient [7:770:2631]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:20:05.458893Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:20:05.458977Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:20:05.459039Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:20:05.459090Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:20:05.459149Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2024-11-21T23:20:05.459202Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2024-11-21T23:20:05.459270Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T23:20:05.459331Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2024-11-21T23:20:05.459389Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2024-11-21T23:20:05.459438Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2024-11-21T23:20:05.459837Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 
PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2024-11-21T23:20:05.459957Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2024-11-21T23:20:05.460032Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2024-11-21T23:20:05.460129Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2024-11-21T23:20:05.460182Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T23:20:05.460217Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2024-11-21T23:20:05.460270Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:20:05.460305Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:20:05.460381Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2024-11-21T23:20:05.460434Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2024-11-21T23:20:05.460488Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2024-11-21T23:20:05.460539Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T23:20:05.460566Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:20:05.460590Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2024-11-21T23:20:05.460618Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2024-11-21T23:20:05.460663Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T23:20:05.460687Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2024-11-21T23:20:05.460712Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2024-11-21T23:20:05.460737Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2024-11-21T23:20:05.460764Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T23:20:05.460791Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2024-11-21T23:20:05.460815Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2024-11-21T23:20:05.460839Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2024-11-21T23:20:05.460867Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T23:20:05.460889Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2024-11-21T23:20:05.460911Z node 
7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T23:20:05.460936Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T23:20:05.460969Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T23:20:05.460996Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2024-11-21T23:20:05.461020Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2024-11-21T23:20:05.461044Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2024-11-21T23:20:05.461081Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2024-11-21T23:20:05.461442Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2024-11-21T23:20:05.461536Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2024-11-21T23:20:05.461597Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2024-11-21T23:20:05.461639Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2024-11-21T23:20:05.461687Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:20:05.461742Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:20:05.461793Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:20:05.462257Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:20:05.462318Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2024-11-21T23:20:05.462603Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2024-11-21T23:20:05.462980Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2024-11-21T23:20:05.463084Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2024-11-21T23:20:05.463155Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2024-11-21T23:20:05.463257Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2024-11-21T23:20:05.463451Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2024-11-21T23:20:05.463518Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2024-11-21T23:20:05.463641Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T23:20:05.463714Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:20:05.463777Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2024-11-21T23:20:05.463822Z node 7 
:TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2024-11-21T23:20:05.463872Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2024-11-21T23:20:05.464156Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2024-11-21T23:20:05.464210Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2024-11-21T23:20:05.464264Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:20:05.464315Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:20:05.464355Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2024-11-21T23:20:05.464390Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:20:05.464444Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2024-11-21T23:20:05.464492Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:20:05.464538Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:20:05.464591Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:20:05.464638Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:20:05.465360Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 3500 txid# 1234567890011} 2024-11-21T23:20:05.465468Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2024-11-21T23:20:05.466217Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:20:05.466287Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2024-11-21T23:20:05.466374Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:763:2625] 2024-11-21T23:20:05.466685Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> KqpFlipJoin::Right_1 >> KqpJoinOrder::TPCH11-StreamLookupJoin-ColumnStore >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] >> KqpJoin::IdxLookupSelf >> KqpJoinOrder::TPCH10-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH10+StreamLookupJoin-ColumnStore >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> KqpExplain::Predicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231724.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=132231724.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231724.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231724.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=132231724.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231724.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230524.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=112231724.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112231724.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230524.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=112230524.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112230524.000000s;Name=;Codec=}; 2024-11-21T23:18:45.310452Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:18:45.419752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:18:45.440417Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:18:45.440486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:18:45.440696Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:18:45.448069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:18:45.448310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:18:45.448506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:18:45.448584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:18:45.448648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:18:45.448725Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:18:45.448807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:18:45.448905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:18:45.448985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:18:45.449052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:18:45.449111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:18:45.449169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:18:45.470121Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:18:45.470206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:18:45.484563Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:18:45.484869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:18:45.484932Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:18:45.485125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:45.485301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:18:45.485376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:18:45.485427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:18:45.485513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 
chunks found; 2024-11-21T23:18:45.485577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:18:45.485636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:18:45.485686Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:18:45.485866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:18:45.485934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:18:45.485977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:18:45.486007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:18:45.486101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:18:45.486154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:18:45.486198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:18:45.486231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:18:45.486298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:18:45.486336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:18:45.486363Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:18:45.486442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:18:45.486487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:18:45.486517Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:18:45.486728Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2024-11-21T23:18:45.486820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2024-11-21T23:18:45.486905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=35; 2024-11-21T23:18:45.486988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2024-11-21T23:18:45.487143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:18:45.487204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:18:45.487236Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... e=118; 2024-11-21T23:20:06.977361Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:20:06.977408Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=125; 2024-11-21T23:20:06.977489Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=46; 2024-11-21T23:20:06.977563Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=39; 2024-11-21T23:20:06.977785Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=187; 2024-11-21T23:20:06.978356Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=529; 2024-11-21T23:20:06.978476Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=48; 2024-11-21T23:20:06.978551Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=37; 2024-11-21T23:20:06.978606Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2024-11-21T23:20:06.978652Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2024-11-21T23:20:06.978689Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2024-11-21T23:20:06.978773Z 
node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2024-11-21T23:20:06.978819Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2024-11-21T23:20:06.978895Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2024-11-21T23:20:06.978934Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2024-11-21T23:20:06.978991Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2024-11-21T23:20:06.979021Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15424; 2024-11-21T23:20:06.979160Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=21;blobs=36;rows=560000;bytes=33236684;raw_bytes=56296163; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:20:06.979269Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:20:06.979313Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard_impl.cpp:1558;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T23:20:06.979351Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:20:06.979468Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T23:20:06.979569Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T23:20:06.979682Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:20:06.979726Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:20:06.979879Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:20:06.979926Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:20:06.979999Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:20:06.980083Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T23:20:06.980143Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:20:06.980182Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:20:06.980231Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:20:06.980271Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:20:06.980313Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:20:06.980392Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:20:06.981296Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:20:06.981441Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:1575:3444];tablet_id=9437184;parent=[1:1534:3410];fline=manager.h:99;event=ask_data;request=request_id=139;1={portions_count=21};; 2024-11-21T23:20:06.982222Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:20:06.983617Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:20:06.983656Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2024-11-21T23:20:06.983681Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:20:06.983728Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:20:06.983784Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:20:06.983873Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=5; 2024-11-21T23:20:06.983932Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700013;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:20:06.983982Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=5;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:20:06.984039Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:20:06.984075Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:20:06.984120Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:20:06.984205Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:20:06.984673Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=21;path_id=1; 2024-11-21T23:20:06.985863Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=21;path_id=1; 2024-11-21T23:20:06.987558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:20:06.987626Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1534:3410];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:19:59.966546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:19:59.966641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:19:59.966674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:19:59.966724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:19:59.966771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:19:59.966803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:19:59.966858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:19:59.967229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:20:00.048454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:20:00.048518Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:00.065785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:20:00.070033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:20:00.070258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:20:00.077417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:20:00.078061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:20:00.078749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:20:00.079109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:20:00.085796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:20:00.087314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:20:00.087381Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:20:00.087654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:20:00.087705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:20:00.087744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:20:00.087845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:20:00.095231Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:20:00.268276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:20:00.268520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:20:00.268717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:20:00.268901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:20:00.268949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:20:00.282462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:20:00.282657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:20:00.282872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:20:00.282926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:20:00.282967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:20:00.282994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:20:00.291649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:20:00.291719Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:20:00.291754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:20:00.296149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:20:00.296223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2024-11-21T23:20:00.296286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:20:00.296358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:20:00.300703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:20:00.311281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:20:00.311559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:20:00.312650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:20:00.312818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:20:00.312869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:20:00.313222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:20:00.313273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:20:00.313467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:20:00.313559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:20:00.323620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:20:00.323685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:20:00.323892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:20:00.323934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:20:00.324324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:20:00.324387Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:20:00.324488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:20:00.324525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:20:00.324567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:20:00.324605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:20:00.324651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:20:00.324700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:20:00.324798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:20:00.324838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:20:00.324876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:20:00.329548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:20:00.329709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:20:00.329754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:20:00.329834Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:20:00.329883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:20:00.330027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 0, tablet: 72057594037968897 2024-11-21T23:20:07.215161Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteOwnerTablets, msg: { Owner: 72075186233409618 TxId: 175 } 2024-11-21T23:20:07.215316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free owner tablets reply, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897, at schemeshard: 72057594046678944 2024-11-21T23:20:07.215440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 175:0, at schemeshard: 72057594046678944, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897 2024-11-21T23:20:07.215494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 HandleReply TDeleteExternalShards, Status: ALREADY, from Hive: 72057594037968897, Owner: 72075186233409618, at schemeshard: 72057594046678944 2024-11-21T23:20:07.215643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2024-11-21T23:20:07.215835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:20:07.215903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2024-11-21T23:20:07.218775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T23:20:07.219047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:20:07.219094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:20:07.219237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2024-11-21T23:20:07.219383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:20:07.219422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2024-11-21T23:20:07.219466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2024-11-21T23:20:07.219951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T23:20:07.220007Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2024-11-21T23:20:07.220047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2024-11-21T23:20:07.221070Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:20:07.221159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:20:07.221191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 
2024-11-21T23:20:07.221229Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2024-11-21T23:20:07.221267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:20:07.222765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:20:07.222848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2024-11-21T23:20:07.222883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2024-11-21T23:20:07.222916Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2024-11-21T23:20:07.222948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 6 2024-11-21T23:20:07.223020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2024-11-21T23:20:07.225527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:20:07.225580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:20:07.225599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:20:07.225760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2024-11-21T23:20:07.225792Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2024-11-21T23:20:07.225861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2024-11-21T23:20:07.225883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T23:20:07.225927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2024-11-21T23:20:07.225970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2024-11-21T23:20:07.226005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2024-11-21T23:20:07.226038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2024-11-21T23:20:07.226201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2024-11-21T23:20:07.226979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:20:07.227448Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 74 TabletID: 72075186233409619 2024-11-21T23:20:07.228259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 74 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2024-11-21T23:20:07.228476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 4 Forgetting tablet 72075186233409619 2024-11-21T23:20:07.233578Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 73 TabletID: 72075186233409618 2024-11-21T23:20:07.234318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:20:07.236086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 73 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2024-11-21T23:20:07.236333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2024-11-21T23:20:07.236699Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 75 TabletID: 72075186233409620 Forgetting tablet 72075186233409618 Forgetting tablet 72075186233409620 2024-11-21T23:20:07.238160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 75 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2024-11-21T23:20:07.238369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2024-11-21T23:20:07.239417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:20:07.239461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T23:20:07.239560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2024-11-21T23:20:07.239832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2024-11-21T23:20:07.240194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:20:07.240229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2024-11-21T23:20:07.240280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:20:07.241656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:74 2024-11-21T23:20:07.241696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2024-11-21T23:20:07.243514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:73 2024-11-21T23:20:07.243554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2024-11-21T23:20:07.243662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:75 2024-11-21T23:20:07.243690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:75 tabletId 72075186233409620 2024-11-21T23:20:07.244178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:20:07.245055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2024-11-21T23:20:07.246092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2024-11-21T23:20:07.246131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2024-11-21T23:20:07.247259Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2024-11-21T23:20:07.247340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2024-11-21T23:20:07.247367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:6729:7737] TestWaitNotification: OK eventTxId 175 >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> KqpJoinOrder::TPCDS9-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull >> TColumnShardTestSchema::CreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DeletePrepared-Volatile [GOOD] Test command err: 2024-11-21T23:19:24.719929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:19:24.724187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:24.724381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000d6d/r3tmp/tmpZwss26/pdisk_1.dat 2024-11-21T23:19:26.852215Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2100} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.159382s 2024-11-21T23:19:26.852297Z node 1 :BS_CONTROLLER ERROR: {BSC00@impl.h:2152} StateWork event processing took too much time Type# 2146435078 Duration# 0.159476s 2024-11-21T23:19:26.989846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.178960Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:27.251780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:27.261806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:27.317228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:27.601750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:27.743693Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:19:27.744933Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:19:27.745401Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:19:27.765177Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:19:27.903054Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:19:27.903720Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:19:27.903800Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:19:27.916183Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:19:27.916288Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:19:27.920429Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:19:27.942682Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:19:27.980769Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:19:27.980991Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:19:27.981101Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:19:27.981138Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:19:27.981179Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2024-11-21T23:19:27.981216Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:27.981638Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:27.981685Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:27.998270Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:19:27.998431Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:19:27.998657Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:27.998715Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.004391Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:19:28.004527Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:19:28.004573Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:19:28.004614Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:19:28.004652Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:19:28.004695Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:19:28.004743Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:19:28.004797Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:19:28.004957Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:19:28.005011Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:19:28.011952Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:19:28.028566Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:19:28.028659Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:19:28.028759Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:19:28.028807Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:19:28.028848Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:19:28.028889Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2024-11-21T23:19:28.028925Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.029245Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:19:28.029287Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:19:28.029322Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:19:28.029358Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.041742Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:19:28.041829Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:19:28.041901Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:19:28.041941Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.041996Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:19:28.043536Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:19:28.043619Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:19:28.055362Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:19:28.055442Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:19:28.055480Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:19:28.055597Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:19:28.059856Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:19:28.258660Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.258720Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:19:28.258773Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:19:28.258906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:19:28.258968Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:19:28.259095Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:19:28.259141Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:19:28.259197Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:19:28.259241Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2024-11-21T23:19:28.277485Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:19:28.277554Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:19:28.278012Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.278044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:19:28.278089Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-2 ... to execute [1500:101] at 72075186224037888 on unit LoadTxDetails 2024-11-21T23:20:07.855177Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 got data writeOp from cache 1500:101 2024-11-21T23:20:07.855214Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T23:20:07.855238Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit LoadWriteDetails 2024-11-21T23:20:07.855260Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:20:07.855280Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit BuildAndWaitDependencies 2024-11-21T23:20:07.855349Z node 7 :TX_DATASHARD TRACE: Operation [1500:101] is the new logically complete end at 72075186224037888 2024-11-21T23:20:07.855392Z node 7 :TX_DATASHARD TRACE: Operation [1500:101] is the new logically incomplete end at 72075186224037888 2024-11-21T23:20:07.855437Z node 7 :TX_DATASHARD TRACE: Activated operation [1500:101] at 72075186224037888 2024-11-21T23:20:07.855481Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T23:20:07.855507Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:20:07.855528Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit BuildWriteOutRS 2024-11-21T23:20:07.855556Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit BuildWriteOutRS 2024-11-21T23:20:07.855622Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T23:20:07.855645Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit BuildWriteOutRS 2024-11-21T23:20:07.855666Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2024-11-21T23:20:07.855688Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit StoreAndSendWriteOutRS 2024-11-21T23:20:07.855720Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T23:20:07.855743Z node 
7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2024-11-21T23:20:07.855765Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit PrepareWriteTxInRS 2024-11-21T23:20:07.855787Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit PrepareWriteTxInRS 2024-11-21T23:20:07.855812Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T23:20:07.855833Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit PrepareWriteTxInRS 2024-11-21T23:20:07.855853Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit LoadAndWaitInRS 2024-11-21T23:20:07.855872Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit LoadAndWaitInRS 2024-11-21T23:20:07.855894Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T23:20:07.855916Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit LoadAndWaitInRS 2024-11-21T23:20:07.855936Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit ExecuteWrite 2024-11-21T23:20:07.855955Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit ExecuteWrite 2024-11-21T23:20:07.855988Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [1500:101] at 72075186224037888 2024-11-21T23:20:07.856112Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [1500:101] at 72075186224037888, row count=1 2024-11-21T23:20:07.856162Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2024-11-21T23:20:07.856243Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:20:07.856274Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit ExecuteWrite 2024-11-21T23:20:07.856318Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit CompleteWrite 2024-11-21T23:20:07.856359Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit CompleteWrite 2024-11-21T23:20:07.856550Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is DelayComplete 2024-11-21T23:20:07.856584Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit CompleteWrite 2024-11-21T23:20:07.856622Z node 7 :TX_DATASHARD TRACE: Add [1500:101] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:20:07.856661Z node 7 :TX_DATASHARD TRACE: Trying to execute [1500:101] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:20:07.856697Z node 7 :TX_DATASHARD TRACE: Execution status for [1500:101] at 72075186224037888 is Executed 2024-11-21T23:20:07.856719Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [1500:101] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:20:07.856750Z node 7 :TX_DATASHARD TRACE: Execution plan for [1500:101] at 72075186224037888 has finished 2024-11-21T23:20:07.856796Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:20:07.856868Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:20:07.856918Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit 
at 72075186224037888 has no attached operations 2024-11-21T23:20:07.856961Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:20:07.868101Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037888 step# 1500 txid# 101} 2024-11-21T23:20:07.868212Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2024-11-21T23:20:07.868312Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:20:07.868366Z node 7 :TX_DATASHARD TRACE: Complete execution for [1500:101] at 72075186224037888 on unit CompleteWrite 2024-11-21T23:20:07.868451Z node 7 :TX_DATASHARD DEBUG: Complete write [1500 : 101] from 72075186224037888 at tablet 72075186224037888 send result to client [7:557:2484] 2024-11-21T23:20:07.868513Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:20:07.870187Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:738:2610], Recipient [7:631:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:20:07.870266Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:20:07.870322Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [7:737:2609], serverId# [7:738:2610], sessionId# [0:0:0] 2024-11-21T23:20:07.870528Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:736:2608], Recipient [7:631:2536]: NKikimrTxDataShard.TEvGetInfoRequest 2024-11-21T23:20:07.871702Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:741:2613], Recipient [7:631:2536]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:20:07.871785Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:20:07.871844Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [7:740:2612], serverId# [7:741:2613], sessionId# [0:0:0] 2024-11-21T23:20:07.872054Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:739:2611], Recipient [7:631:2536]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2024-11-21T23:20:07.872138Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2024-11-21T23:20:07.872208Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/101 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2024-11-21T23:20:07.872256Z node 7 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2024-11-21T23:20:07.872331Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2024-11-21T23:20:07.872439Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:20:07.872492Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2024-11-21T23:20:07.872538Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2024-11-21T23:20:07.872581Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit 
BuildAndWaitDependencies 2024-11-21T23:20:07.872631Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2024-11-21T23:20:07.872683Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:20:07.872708Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2024-11-21T23:20:07.872733Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2024-11-21T23:20:07.872755Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2024-11-21T23:20:07.872857Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2024-11-21T23:20:07.873145Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:739:2611], 1000} after executionsCount# 1 2024-11-21T23:20:07.873228Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:739:2611], 1000} sends rowCount# 2, bytes# 48, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551567, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2024-11-21T23:20:07.873315Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:739:2611], 1000} finished in read 2024-11-21T23:20:07.873393Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:20:07.873421Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2024-11-21T23:20:07.873449Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2024-11-21T23:20:07.873476Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2024-11-21T23:20:07.873528Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2024-11-21T23:20:07.873549Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2024-11-21T23:20:07.873575Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2024-11-21T23:20:07.873621Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2024-11-21T23:20:07.873738Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: Trying to start YDB, gRPC: 62902, MsgBus: 8880 2024-11-21T23:19:47.545509Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876299222566354:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:47.545795Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002859/r3tmp/tmpg31nZ3/pdisk_1.dat 2024-11-21T23:19:47.964585Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:48.009985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:48.010084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 62902, node 1 2024-11-21T23:19:48.012222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:48.334887Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:48.334908Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:48.334918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:48.335013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8880 TClient is connected to server localhost:8880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:49.083498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:49.116441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:49.290456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:49.477564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:49.569395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:51.446847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876316402437100:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:51.446979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:51.738783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:51.773795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:51.805319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:51.839642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:51.866850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:51.898174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:51.945705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876316402437595:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:51.945783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:51.945826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876316402437600:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:51.948796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:51.961516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876316402437602:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:52.546562Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876299222566354:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:52.546622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"[{column0: 1,column1: 2,column2: 3},{column0: 4,column1: 5,column2: 6}]","Name":"Iterator"}],"Node Type":"ConstantExpr"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 6022, MsgBus: 2225 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002859/r3tmp/tmp3xgGYm/pdisk_1.dat 2024-11-21T23:19:54.079023Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876329862109557:2261];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:54.093458Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:54.168556Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:54.185178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:54.185264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:54.190110Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6022, node 2 2024-11-21T23:19:54.265250Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:54.265279Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:54.265287Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:54.265410Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2225 TClient is connected to server localhost:2225 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:54.808156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:54.819019Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:54.838103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:55.033179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:55.193299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.349599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876347041980729:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.349687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.356949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876347041980734:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.361155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:58.377490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876347041980736:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:19:59.034189Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876329862109557:2261];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:59.034274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2024-11-21T23:19:59.588468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.864205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.952797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Inputs":[],"E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 100)","Key [2000, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 17470, MsgBus: 16500 2024-11-21T23:20:01.506586Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876360468109981:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002859/r3tmp/tmpmv4zrB/pdisk_1.dat 2024-11-21T23:20:01.534620Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:20:01.585291Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:01.608675Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:01.608772Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:01.610583Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 17470, node 3 2024-11-21T23:20:01.682936Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:01.682956Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:01.682964Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:01.683069Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16500 TClient is connected to server localhost:16500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:02.235341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:02.246065Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:20:02.258808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:02.400409Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:02.610499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:02.704173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:05.066542Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876377647980712:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:05.066644Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:05.120651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:05.163222Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:05.207951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:05.248973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:05.319964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:05.400786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:05.460990Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876377647981210:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:05.461100Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:05.461254Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876377647981215:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:05.465803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:05.481986Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876377647981217:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:06.503792Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876360468109981:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:06.503865Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:06.722594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::Inner+StreamLookup >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin-ColumnStore >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpJoinOrder::TPCDS87-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] >> ReadIteratorExternalBlobs::NotExtBlobs >> TColumnShardTestSchema::CreateTable [GOOD] >> KqpLimits::TooBigColumn [GOOD] >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin-ColumnStore >> OlapEstimationRowsCorrectness::TPCH2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD] Test command err: 2024-11-21T23:20:09.985988Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:20:10.106470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:20:10.132332Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:20:10.132443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:135:2167];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:20:10.132735Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:20:10.148310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:20:10.148565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:20:10.148837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:20:10.148963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:20:10.149077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2024-11-21T23:20:10.149174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:20:10.149285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:20:10.149413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:20:10.149521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:20:10.149615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:20:10.149713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:20:10.149802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:135:2167];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:20:10.179991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:20:10.197613Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:20:10.198024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:20:10.198080Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:20:10.198309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:20:10.198509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:20:10.198591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:20:10.198656Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:20:10.198774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:20:10.198877Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:20:10.198921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:20:10.198950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:20:10.199129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:20:10.199195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:20:10.199240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:20:10.199273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:20:10.199363Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:20:10.199418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:20:10.199477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:20:10.199507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:20:10.199574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:20:10.199637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:20:10.199689Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:20:10.199743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:20:10.199779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:20:10.199804Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:20:10.200016Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=69; 2024-11-21T23:20:10.200118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2024-11-21T23:20:10.200223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=48; 2024-11-21T23:20:10.200309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2024-11-21T23:20:10.200475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:20:10.200530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:20:10.200562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:20:10.200788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:20:10.200845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:20:10.200886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:20:10.201052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:20:10.201098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:20:10.201125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:20:10.201323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:20:10.201367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:20:10.201392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:20:10.201500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=norm ... 
KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=89197881222336;fline=columnshard__propose_transaction.cpp:104;event=actual tx operator; 2024-11-21T23:20:11.433005Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=88923004813664;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=89197881222336;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=tx_controller.cpp:371;event=start;tx_info=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;; 2024-11-21T23:20:11.433052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=88923004813664;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=89197881222336;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T23:20:11.433113Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;this=88923004813664;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=019:0;;int_this=89197881222336;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2024-11-21T23:20:11.433437Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1018 at tablet 9437184, mediator 0 2024-11-21T23:20:11.433507Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2024-11-21T23:20:11.433640Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvTxProcessing::TEvPlanStep;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:20:11.433878Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2024-11-21T23:20:11.434023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=tables_manager.cpp:259;method=RegisterTable;path_id=19; 2024-11-21T23:20:11.434060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine.h:339;event=RegisterTable;path_id=19; 2024-11-21T23:20:11.434483Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=execute;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T23:20:11.446953Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2024-11-21T23:20:11.447105Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 
DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2024-11-21T23:20:11.448247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004816352;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T23:20:11.448481Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004816352;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T23:20:11.465590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;this=88923004816352;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T23:20:11.465708Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;this=88923004816352;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } 
Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2024-11-21T23:20:11.466902Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004818144;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T23:20:11.467196Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004818144;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T23:20:11.479397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;this=88923004818144;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T23:20:11.479498Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;this=88923004818144;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2024-11-21T23:20:11.480737Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004819936;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=tx_controller.cpp:311;event=start; 2024-11-21T23:20:11.480940Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;self_id=[1:135:2167];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004819936;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=tx_controller.cpp:342;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2024-11-21T23:20:11.504418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;this=88923004819936;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:163:2188]; 2024-11-21T23:20:11.504500Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;this=88923004819936;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:163:2188];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD] Test command err: Trying to start YDB, gRPC: 6861, MsgBus: 1747 2024-11-21T23:19:43.999880Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876284315959067:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:44.000017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002879/r3tmp/tmpZykcNM/pdisk_1.dat 2024-11-21T23:19:44.588000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:44.588093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:44.590601Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:44.593273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6861, node 1 2024-11-21T23:19:44.719025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:44.719055Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:44.719063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:44.719179Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1747 TClient is connected to server localhost:1747 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:45.709488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:45.738192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:45.914309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:46.179157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:46.289115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:48.554886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876305790797119:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.555056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.603566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.668310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.713358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.758827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.814810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.862711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:48.942799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876305790797617:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.942902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.943159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876305790797622:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:48.947911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:48.979467Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876284315959067:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:48.979526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:48.986696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876305790797624:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:50.280104Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d0a80] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=N2M2NWI0My1kZmZiMjZiZi1iNzUzNWIwYy1hMTdhNGExOA==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:41870 2024-11-21T23:19:50.280176Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d1f80] created request Name# ExecuteDataQuery 2024-11-21T23:19:50.280328Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d0a80] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=N2M2NWI0My1kZmZiMjZiZi1iNzUzNWIwYy1hMTdhNGExOA==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:41870 database# /Root 2024-11-21T23:19:50.280726Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jd8gcnr84axvcw33tnqern2m, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:41870, grpcInfo# grpc-c++/1.54.2 grpc-c/31.0.0 (linux; chttp2), timeout# 2.977689s
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:6861 2024-11-21T23:19:53.261784Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439876314380732569:2462] TxId: 281474976710671. Ctx: { TraceId: 01jd8gcnr84axvcw33tnqern2m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M2NWI0My1kZmZiMjZiZi1iNzUzNWIwYy1hMTdhNGExOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2024-11-21T23:19:53.267396Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d1f80] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=N2M2NWI0My1kZmZiMjZiZi1iNzUzNWIwYy1hMTdhNGExOA==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:41884 2024-11-21T23:19:53.267455Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d2680] created request Name# ExecuteDataQuery 2024-11-21T23:19:53.267569Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d1f80] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=N2M2NWI0My1kZmZiMjZiZi1iNzUzNWIwYy1hMTdhNGExOA==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:41884 database# /Root 2024-11-21T23:19:53.289766Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439876314380732576:2471], TxId: 281474976710671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=N2M2NWI0My1kZmZiMjZiZi1iNzUzNWIwYy1hMTdhNGExOA==. CustomerSuppliedId : . TraceId : 01jd8gcnr84axvcw33tnqern2m. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439876314380732569:2462], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:53.290244Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7439876314380732577:2472], TxId: 281474976710671, task: 2. Ctx: { TraceId : 01jd8gcnr84axvcw33tnqern2m. SessionId : ydb://session/3?node_id=1&id=N2M2NWI0My1kZmZiMjZiZi1iNzUzNWIwYy1hMTdhNGExOA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7439876314380732569:2462], status: ABORTED, reason: {
: Error: Terminate execution } 2024-11-21T23:19:53.290893Z node 1 :GRPC_SERVER DEBUG: [0x51b0001d0a80] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:41870 2024-11-21T23:19:53.291358Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jd8gcrnk0r31gt674dzvtgq7, sdkBuildInfo# ydb-cpp-sdk/2.6.2, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:41884, grp ... 03eeb80] received request Name# ListTagsForStream ok# false data# peer# 2024-11-21T23:20:03.189412Z node 2 :GRPC_SERVER DEBUG: [0x51b0003ef280] received request Name# MergeShards ok# false data# peer# 2024-11-21T23:20:03.189490Z node 2 :GRPC_SERVER DEBUG: [0x51b0003ef980] received request Name# RemoveTagsFromStream ok# false data# peer# 2024-11-21T23:20:03.189597Z node 2 :GRPC_SERVER DEBUG: [0x51b000331b80] received request Name# SplitShard ok# false data# peer# 2024-11-21T23:20:03.189703Z node 2 :GRPC_SERVER DEBUG: [0x51b0003cde80] received request Name# StartStreamEncryption ok# false data# peer# 2024-11-21T23:20:03.189792Z node 2 :GRPC_SERVER DEBUG: [0x51b0003cc280] received request Name# StopStreamEncryption ok# false data# peer# 2024-11-21T23:20:03.189879Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c6e80] received request Name# SelfCheck ok# false data# peer# 2024-11-21T23:20:03.189968Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c4480] received request Name# NodeCheck ok# false data# peer# 2024-11-21T23:20:03.190070Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c2880] received request Name# CreateSession ok# false data# peer# 2024-11-21T23:20:03.190175Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c2180] received request Name# DeleteSession ok# false data# peer# 2024-11-21T23:20:03.190267Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c2f80] received request Name# AttachSession ok# false data# peer# 2024-11-21T23:20:03.190354Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c3d80] received request Name# BeginTransaction ok# false data# peer# 2024-11-21T23:20:03.190487Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c4b80] received request Name# CommitTransaction ok# false data# peer# 2024-11-21T23:20:03.190632Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c5980] received request Name# RollbackTransaction ok# false data# peer# 2024-11-21T23:20:03.190765Z node 2 :GRPC_SERVER DEBUG: [0x51b000331480] received request Name# ExecuteQuery ok# false data# peer# 2024-11-21T23:20:03.190909Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c0c80] received request Name# ExecuteScript ok# false data# peer# 2024-11-21T23:20:03.191045Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c1380] received request Name# FetchScriptResults ok# false data# peer# 2024-11-21T23:20:03.191310Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c5280] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2024-11-21T23:20:03.191458Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c6080] received request Name# ChangeTabletSchema ok# false data# peer# 2024-11-21T23:20:03.191608Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c7c80] received request Name# RestartTablet ok# false data# peer# 2024-11-21T23:20:03.191761Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c6780] received request Name# CreateLogStore ok# false data# peer# 2024-11-21T23:20:03.192020Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c8380] received request Name# DescribeLogStore ok# false data# peer# 2024-11-21T23:20:03.192272Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c9880] received request Name# DropLogStore ok# false data# peer# 
2024-11-21T23:20:03.192527Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c8a80] received request Name# AlterLogStore ok# false data# peer# 2024-11-21T23:20:03.192688Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c9f80] received request Name# CreateLogTable ok# false data# peer# 2024-11-21T23:20:03.192791Z node 2 :GRPC_SERVER DEBUG: [0x51b0003c7580] received request Name# DescribeLogTable ok# false data# peer# 2024-11-21T23:20:03.192883Z node 2 :GRPC_SERVER DEBUG: [0x51b0003ca680] received request Name# DropLogTable ok# false data# peer# 2024-11-21T23:20:03.192931Z node 2 :GRPC_SERVER DEBUG: [0x51b0003cb480] received request Name# AlterLogTable ok# false data# peer# 2024-11-21T23:20:03.192980Z node 2 :GRPC_SERVER DEBUG: [0x51b0003cc980] received request Name# Login ok# false data# peer# 2024-11-21T23:20:03.193076Z node 2 :GRPC_SERVER DEBUG: [0x51b0003cbb80] received request Name# DescribeReplication ok# false data# peer# Trying to start YDB, gRPC: 11125, MsgBus: 4337 2024-11-21T23:20:04.212563Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876370939423625:2187];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002879/r3tmp/tmp7L1pk1/pdisk_1.dat 2024-11-21T23:20:04.320181Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:20:04.468878Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:04.483150Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:04.483247Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:04.484218Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11125, node 3 2024-11-21T23:20:04.651155Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:04.651181Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:04.651194Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:04.651321Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4337 TClient is connected to server localhost:4337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:20:05.217267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:05.226260Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:05.236937Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:05.352764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:05.573614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:20:05.655951Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.358571Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876388119294367:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.358664Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.409568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.486710Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.521894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.556777Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.592022Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.645934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.747953Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876388119294872:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.748096Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.750676Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876388119294877:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.754881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:08.767271Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876388119294879:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:09.208807Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876370939423625:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:09.208895Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn [GOOD] Test command err: Trying to start YDB, gRPC: 16281, MsgBus: 19335 2024-11-21T23:17:45.402775Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875776746580042:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:45.402825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002921/r3tmp/tmpC0Kl0u/pdisk_1.dat 2024-11-21T23:17:45.850148Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:45.868235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:45.868358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:45.870290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16281, node 1 2024-11-21T23:17:45.973156Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:45.973180Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:45.973188Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:45.973282Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19335 TClient is connected to server localhost:19335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:17:46.565594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:46.593104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:17:46.781460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:17:46.998508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:47.081252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:49.243251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875793926450721:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:49.256463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:49.301143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:49.352281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:49.397631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:49.438456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:49.464908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:49.514811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:49.600930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875793926451220:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:49.601012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:49.601485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875793926451225:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:49.604872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:49.623427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875793926451227:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:50.397320Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875776746580042:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:50.397392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:50.729192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:00.850513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:00.850559Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:53.200934Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439876322207430657:2485] TxId: 281474976710672. Ctx: { TraceId: 01jd8g91em7gs2agkncsr6rzc5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IxYzZmYzUtMTEwMGQzZC02NzQ5ZTM5Mi01ODcyMDg4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 50663499 > 50331648 2024-11-21T23:19:53.218566Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2IxYzZmYzUtMTEwMGQzZC02NzQ5ZTM5Mi01ODcyMDg4YQ==, ActorId: [1:7439875798221419151:2485], ActorState: ExecuteState, TraceId: 01jd8g91em7gs2agkncsr6rzc5, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (50663499 > 50331648), code: 200509 Trying to start YDB, gRPC: 13755, MsgBus: 11685 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002921/r3tmp/tmpkYWydF/pdisk_1.dat 2024-11-21T23:19:54.635916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:19:54.637024Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:54.675759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:54.675852Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:54.677111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13755, node 2 2024-11-21T23:19:54.831133Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:54.831157Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:54.831172Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:54.831334Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11685 TClient is connected to server localhost:11685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:55.493926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:55.516730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:55.623502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreat ... : 72057594046644480 2024-11-21T23:19:59.780579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876349482259814:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:59.780688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:59.781038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876349482259819:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:59.786311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:59.810717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876349482259821:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:01.895255Z node 2 :TX_DATASHARD ERROR: Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 2024-11-21T23:20:01.895442Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715671 at tablet 72075186224037914 status: BAD_REQUEST errors: BAD_ARGUMENT (Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914) | 2024-11-21T23:20:01.895859Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7439876358072194789:2462] TxId: 281474976715671. Ctx: { TraceId: 01jd8gd0tqem39fzrasy4w5q4a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGYwMzI1MWQtYjNjNzUwNC00YWViMmRiZS00ZjUyYTExYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: [BAD_ARGUMENT] Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914; 2024-11-21T23:20:01.913347Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGYwMzI1MWQtYjNjNzUwNC00YWViMmRiZS00ZjUyYTExYw==, ActorId: [2:7439876358072194752:2462], ActorState: ExecuteState, TraceId: 01jd8gd0tqem39fzrasy4w5q4a, Create QueryResponse for error on request, msg:
: Error: Bad request., code: 2017
: Error: [BAD_ARGUMENT] Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 Trying to start YDB, gRPC: 28612, MsgBus: 29552 2024-11-21T23:20:03.038674Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876369145564129:2120];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:03.039309Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002921/r3tmp/tmpK8MUDd/pdisk_1.dat 2024-11-21T23:20:03.217049Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:03.238811Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:03.238924Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:03.241100Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28612, node 3 2024-11-21T23:20:03.405235Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:03.405261Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:03.405271Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:03.405433Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29552 TClient is connected to server localhost:29552 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:20:03.976864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:20:03.985543Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:20:03.991483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:20:04.087529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:04.299224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:04.401692Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:08.040716Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876369145564129:2120];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:08.040803Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:08.366946Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876390620402262:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.367045Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.432349Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.513713Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.553933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.629898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.697157Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.780543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:08.836626Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876390620402763:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.836743Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.837546Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876390620402768:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:08.841753Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:08.855089Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876390620402770:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:11.114203Z node 3 :TX_DATASHARD ERROR: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2024-11-21T23:20:11.114355Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710671 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2024-11-21T23:20:11.144287Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7439876399210337779:2464] TxId: 281474976710671. Ctx: { TraceId: 01jd8gd9gt2rypgy2n6pcz3p5n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDkxYWM2YjgtYWM5OWRjYmYtMTQyNGZjNzctZmRmZDZlY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2024-11-21T23:20:11.144953Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDkxYWM2YjgtYWM5OWRjYmYtMTQyNGZjNzctZmRmZDZlY2M=, ActorId: [3:7439876399210337705:2464], ActorState: ExecuteState, TraceId: 01jd8gd9gt2rypgy2n6pcz3p5n, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold >> KqpJoinOrder::FourWayJoinLeftFirst+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin+ColumnStore |87.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpFlipJoin::Right_1 [GOOD] >> KqpFlipJoin::Right_2 |87.3%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin-ColumnStore >> KqpJoin::IdxLookupSelf [GOOD] >> KqpJoin::JoinAggregateSingleRow >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> KqpFlipJoin::Inner_3 [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin-ColumnStore >> KqpFlipJoin::LeftSemi_1 >> KqpJoin::CrossJoinCount [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup >> KqpJoin::JoinWithDuplicates >> KqpJoinOrder::TPCDS90-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS90-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpIndexLookupJoin::Inner+StreamLookup [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS9_SMALL+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelect_BadCases [GOOD] Test command err: Trying to start YDB, gRPC: 16703, MsgBus: 6868 2024-11-21T23:19:48.707865Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876302263915470:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:48.707907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002851/r3tmp/tmp8ARKSp/pdisk_1.dat 2024-11-21T23:19:49.361461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:49.361568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:49.368049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:49.399745Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16703, node 1 2024-11-21T23:19:49.534970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:49.534995Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:49.535000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:49.535107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6868 TClient is connected to server localhost:6868 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:50.116211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:50.153538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:50.346480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:50.541550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:50.640174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:52.256052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876319443786365:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:52.256166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:52.525837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:52.579752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:52.612220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:52.647087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:52.720020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:52.761552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:52.868973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876319443786868:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:52.869063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:52.869271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876319443786873:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:52.872387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:52.885103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876319443786875:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:53.707944Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876302263915470:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:53.708014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25942, MsgBus: 61618 2024-11-21T23:19:55.392984Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876334862053498:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:55.393043Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002851/r3tmp/tmpgsPMrc/pdisk_1.dat 2024-11-21T23:19:55.536386Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:55.564708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:55.564827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:55.570288Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25942, node 2 2024-11-21T23:19:55.645392Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:55.645417Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:55.645424Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:55.645515Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61618 TClient is connected to server localhost:61618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:56.175818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:56.190221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:19:56.257928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.451720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:56.529358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:58.670563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876347746957077:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.670676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Roo ... _V2;id=15; 2024-11-21T23:20:12.298703Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:20:12.298808Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:20:12.298831Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:20:13.983079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2024-11-21T23:20:14.095483Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038007 not found 2024-11-21T23:20:14.095522Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038005 not found 2024-11-21T23:20:14.095537Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038003 not found 2024-11-21T23:20:14.095550Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037999 not found 2024-11-21T23:20:14.095565Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037997 not found 2024-11-21T23:20:14.095582Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037995 not found 2024-11-21T23:20:14.095595Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037991 not found 2024-11-21T23:20:14.095632Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038001 not found 2024-11-21T23:20:14.095647Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037993 not found 2024-11-21T23:20:14.095661Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038020 not found 2024-11-21T23:20:14.095676Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038022 not found 2024-11-21T23:20:14.095690Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038018 not found 2024-11-21T23:20:14.095705Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038024 not found 2024-11-21T23:20:14.095721Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038016 not found 2024-11-21T23:20:14.095738Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038012 not found 2024-11-21T23:20:14.095752Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038010 not found 2024-11-21T23:20:14.095773Z 
node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038014 not found 2024-11-21T23:20:14.109794Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037972 not found 2024-11-21T23:20:14.130266Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038008 not found 2024-11-21T23:20:14.130306Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038027 not found 2024-11-21T23:20:14.130321Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037970 not found 2024-11-21T23:20:14.130336Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037968 not found 2024-11-21T23:20:14.130350Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037989 not found 2024-11-21T23:20:14.130953Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037987 not found 2024-11-21T23:20:14.130969Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037977 not found 2024-11-21T23:20:14.130984Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038025 not found 2024-11-21T23:20:14.131000Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037975 not found 2024-11-21T23:20:14.131014Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037966 not found 2024-11-21T23:20:14.131027Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037983 not found 2024-11-21T23:20:14.131041Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038004 not found 2024-11-21T23:20:14.131056Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037985 not found 2024-11-21T23:20:14.131068Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037979 not found 2024-11-21T23:20:14.131082Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037981 not found 2024-11-21T23:20:14.131097Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038019 not found 2024-11-21T23:20:14.131114Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038000 not found 2024-11-21T23:20:14.131129Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037998 not found 2024-11-21T23:20:14.131144Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038002 not found 2024-11-21T23:20:14.131159Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037988 not found 2024-11-21T23:20:14.136980Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038021 not found 2024-11-21T23:20:14.137128Z node 3 :HIVE 
WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037984 not found 2024-11-21T23:20:14.137144Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037990 not found 2024-11-21T23:20:14.137158Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038026 not found 2024-11-21T23:20:14.137172Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038006 not found 2024-11-21T23:20:14.137186Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037973 not found 2024-11-21T23:20:14.137200Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037971 not found 2024-11-21T23:20:14.137212Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037994 not found 2024-11-21T23:20:14.137224Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038017 not found 2024-11-21T23:20:14.137238Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038011 not found 2024-11-21T23:20:14.137253Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038028 not found 2024-11-21T23:20:14.137266Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037992 not found 2024-11-21T23:20:14.137278Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038023 not found 2024-11-21T23:20:14.137304Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037967 not found 2024-11-21T23:20:14.137333Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037965 not found 2024-11-21T23:20:14.137350Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038013 not found 2024-11-21T23:20:14.137363Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037996 not found 2024-11-21T23:20:14.137379Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037976 not found 2024-11-21T23:20:14.137393Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037982 not found 2024-11-21T23:20:14.137406Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038015 not found 2024-11-21T23:20:14.137424Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037978 not found 2024-11-21T23:20:14.141929Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037986 not found 2024-11-21T23:20:14.141947Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037980 not found 2024-11-21T23:20:14.141963Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037969 not found 2024-11-21T23:20:14.141976Z node 3 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038009 not found 2024-11-21T23:20:14.141990Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037974 not found 2024-11-21T23:20:14.377917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:1, at schemeshard: 72057594046644480 2024-11-21T23:20:14.552723Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2FlMGFiZjItNTQ0NjQxNzktMmUzMjc3YTMtMmJkMjNjMWU=, ActorId: [3:7439876415516170166:3845], ActorState: ExecuteState, TraceId: 01jd8gdd7084a86afv7hxvhvvx, Create QueryResponse for error on request, msg: 2024-11-21T23:20:14.793746Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:1, at schemeshard: 72057594046644480 2024-11-21T23:20:15.728188Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715695, at schemeshard: 72057594046644480 2024-11-21T23:20:15.733336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715696:1, at schemeshard: 72057594046644480 2024-11-21T23:20:16.478362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:20:16.478414Z node 3 :IMPORT WARN: Table profiles were not loaded >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup >> KqpJoinOrder::TPCH11-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH11+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> KqpIndexLookupJoin::MultiJoins >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride+StreamLookupJoin-ColumnStore >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> TConsoleConfigTests::TestAddConfigItem >> KqpJoinOrder::TPCH10+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH10-StreamLookupJoin+ColumnStore >> KqpFlipJoin::Right_2 [GOOD] >> KqpFlipJoin::Right_3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 14485, MsgBus: 13001 2024-11-21T23:19:45.657486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876289649074540:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:45.661521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00286d/r3tmp/tmpPDevoL/pdisk_1.dat 2024-11-21T23:19:46.272413Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:46.290085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:46.290179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:46.293997Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14485, node 1 2024-11-21T23:19:46.432555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:46.432574Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:46.432587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:46.432667Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13001 TClient is connected to server localhost:13001 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:47.237072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.281427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.416550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.662271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:47.770049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:50.107255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876306828945281:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.119511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.153367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.183740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.253362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.298844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.344687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.378161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:50.519873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876311123913081:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.519959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.520302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876311123913086:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:50.523371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:50.534159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876311123913088:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:50.658043Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876289649074540:2192];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:50.658092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20684, MsgBus: 12977 2024-11-21T23:19:52.781944Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876322461077491:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:52.782001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/00286d/r3tmp/tmp9Z4wpa/pdisk_1.dat 2024-11-21T23:19:52.985370Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:52.986714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:52.986794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:52.991161Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20684, node 2 2024-11-21T23:19:53.074932Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:53.074956Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:53.074963Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:53.075067Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12977 TClient is connected to server localhost:12977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:53.486900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:53.491830Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:19:53.508307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.587845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.753647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.834292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:56.575493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876339640948379:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... # peer# 2024-11-21T23:20:20.990379Z node 5 :GRPC_SERVER DEBUG: [0x51b000147480] received request Name# TopicService/DropTopic ok# false data# peer# 2024-11-21T23:20:20.990666Z node 5 :GRPC_SERVER DEBUG: [0x51b00028e880] received request Name# Coordination/CreateNode ok# false data# peer# 2024-11-21T23:20:20.990885Z node 5 :GRPC_SERVER DEBUG: [0x51b00028a980] received request Name# Coordination/AlterNode ok# false data# peer# 2024-11-21T23:20:20.991088Z node 5 :GRPC_SERVER DEBUG: [0x51b000290480] received request Name# Coordination/DropNode ok# false data# peer# 2024-11-21T23:20:20.991314Z node 5 :GRPC_SERVER DEBUG: [0x51b00028fd80] received request Name# Coordination/DescribeNode ok# false data# peer# 2024-11-21T23:20:20.991512Z node 5 :GRPC_SERVER DEBUG: [0x51b000291280] received request Name# CreateDatabase ok# false data# peer# 2024-11-21T23:20:20.991753Z node 5 :GRPC_SERVER DEBUG: [0x51b00028b080] received request Name# GetDatabaseStatus ok# false data# peer# 2024-11-21T23:20:20.991955Z node 5 :GRPC_SERVER DEBUG: [0x51b00028be80] received request Name# AlterDatabase ok# false data# peer# 2024-11-21T23:20:20.992179Z node 5 :GRPC_SERVER DEBUG: [0x51b00028c580] received request Name# ListDatabases ok# false data# peer# 2024-11-21T23:20:20.992261Z node 5 :GRPC_SERVER DEBUG: [0x51b00028cc80] received request Name# RemoveDatabase ok# false data# peer# 2024-11-21T23:20:20.992378Z node 5 :GRPC_SERVER DEBUG: [0x51b00028da80] received request Name# DescribeDatabaseOptions ok# false data# peer# 2024-11-21T23:20:20.992457Z node 5 :GRPC_SERVER DEBUG: [0x51b00028e180] received request Name# GetScaleRecommendation ok# false data# peer# 2024-11-21T23:20:20.992570Z node 5 :GRPC_SERVER DEBUG: [0x51b0002c2280] received request Name# ListEndpoints ok# false data# peer# 2024-11-21T23:20:20.992651Z node 5 :GRPC_SERVER DEBUG: [0x51b00028f680] received request Name# WhoAmI ok# false data# peer# 2024-11-21T23:20:20.992674Z node 5 :GRPC_SERVER DEBUG: [0x51b000291980] received request Name# NodeRegistration ok# false data# peer# 2024-11-21T23:20:20.992892Z node 5 :GRPC_SERVER DEBUG: [0x51b000294a80] received request Name# GetShardLocations ok# false data# peer# 2024-11-21T23:20:20.992892Z node 5 :GRPC_SERVER DEBUG: [0x51b000293c80] received request Name# Scan ok# false data# peer# 2024-11-21T23:20:20.993123Z node 5 :GRPC_SERVER DEBUG: [0x51b000294380] received request Name# DescribeTable ok# false data# peer# 2024-11-21T23:20:20.993127Z node 5 :GRPC_SERVER DEBUG: [0x51b000295880] received request Name# CreateSnapshot ok# false data# peer# 2024-11-21T23:20:20.993330Z node 5 :GRPC_SERVER DEBUG: [0x51b000295180] received request Name# RefreshSnapshot ok# false data# peer# 2024-11-21T23:20:20.993336Z node 5 :GRPC_SERVER DEBUG: [0x51b000296680] received request Name# DiscardSnapshot ok# false data# peer# 2024-11-21T23:20:20.993548Z node 5 :GRPC_SERVER DEBUG: [0x51b00029ba80] received request Name# List ok# false data# peer# 2024-11-21T23:20:20.993557Z node 5 :GRPC_SERVER DEBUG: [0x51b000295f80] received request Name# RateLimiter/CreateResource ok# false data# peer# 2024-11-21T23:20:20.993750Z node 5 :GRPC_SERVER DEBUG: [0x51b000285c80] received request Name# RateLimiter/AlterResource ok# false data# peer# 2024-11-21T23:20:20.993805Z node 5 :GRPC_SERVER DEBUG: [0x51b000293580] received request Name# RateLimiter/DropResource ok# false data# peer# 2024-11-21T23:20:20.993948Z node 5 :GRPC_SERVER DEBUG: [0x51b000290b80] received request Name# 
RateLimiter/ListResources ok# false data# peer# 2024-11-21T23:20:20.994000Z node 5 :GRPC_SERVER DEBUG: [0x51b00028ef80] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2024-11-21T23:20:20.994154Z node 5 :GRPC_SERVER DEBUG: [0x51b00028d380] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2024-11-21T23:20:20.994204Z node 5 :GRPC_SERVER DEBUG: [0x51b000289b80] received request Name# CreateStream ok# false data# peer# 2024-11-21T23:20:20.994348Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c7780] received request Name# ListStreams ok# false data# peer# 2024-11-21T23:20:20.994416Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c6280] received request Name# DeleteStream ok# false data# peer# 2024-11-21T23:20:20.994600Z node 5 :GRPC_SERVER DEBUG: [0x51b00028b780] received request Name# DescribeStream ok# false data# peer# 2024-11-21T23:20:20.994639Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c5b80] received request Name# ListShards ok# false data# peer# 2024-11-21T23:20:20.994802Z node 5 :GRPC_SERVER DEBUG: [0x51b000246580] received request Name# SetWriteQuota ok# false data# peer# 2024-11-21T23:20:20.994850Z node 5 :GRPC_SERVER DEBUG: [0x51b00035b480] received request Name# UpdateStream ok# false data# peer# 2024-11-21T23:20:20.995027Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c5480] received request Name# PutRecord ok# false data# peer# 2024-11-21T23:20:20.995065Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c4d80] received request Name# PutRecords ok# false data# peer# 2024-11-21T23:20:20.995234Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c4680] received request Name# GetRecords ok# false data# peer# 2024-11-21T23:20:20.995280Z node 5 :GRPC_SERVER DEBUG: [0x51b0000dba80] received request Name# GetShardIterator ok# false data# peer# 2024-11-21T23:20:20.995431Z node 5 :GRPC_SERVER DEBUG: [0x51b0003eeb80] received request Name# SubscribeToShard ok# false data# peer# 2024-11-21T23:20:20.995472Z node 5 :GRPC_SERVER DEBUG: [0x51b0000bb480] received request Name# DescribeLimits ok# false data# peer# 2024-11-21T23:20:20.995666Z node 5 :GRPC_SERVER DEBUG: [0x51b0000bd080] received request Name# DescribeStreamSummary ok# false data# peer# 2024-11-21T23:20:20.995691Z node 5 :GRPC_SERVER DEBUG: [0x51b0002e9f80] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2024-11-21T23:20:20.995885Z node 5 :GRPC_SERVER DEBUG: [0x51b000246c80] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2024-11-21T23:20:20.995906Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c0080] received request Name# UpdateShardCount ok# false data# peer# 2024-11-21T23:20:20.996098Z node 5 :GRPC_SERVER DEBUG: [0x51b00029eb80] received request Name# UpdateStreamMode ok# false data# peer# 2024-11-21T23:20:20.996107Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c2a80] received request Name# RegisterStreamConsumer ok# false data# peer# 2024-11-21T23:20:20.996313Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c3180] received request Name# DescribeStreamConsumer ok# false data# peer# 2024-11-21T23:20:20.996320Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c3880] received request Name# DeregisterStreamConsumer ok# false data# peer# 2024-11-21T23:20:20.996529Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c1c80] received request Name# ListStreamConsumers ok# false data# peer# 2024-11-21T23:20:20.996545Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c2380] received request Name# AddTagsToStream ok# false data# peer# 2024-11-21T23:20:20.996742Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c1580] received request Name# DisableEnhancedMonitoring ok# 
false data# peer# 2024-11-21T23:20:20.996768Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c0e80] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2024-11-21T23:20:20.996960Z node 5 :GRPC_SERVER DEBUG: [0x51b0001c0780] received request Name# ListTagsForStream ok# false data# peer# 2024-11-21T23:20:20.996978Z node 5 :GRPC_SERVER DEBUG: [0x51b0001ac580] received request Name# MergeShards ok# false data# peer# 2024-11-21T23:20:20.997178Z node 5 :GRPC_SERVER DEBUG: [0x51b00035c280] received request Name# RemoveTagsFromStream ok# false data# peer# 2024-11-21T23:20:20.997189Z node 5 :GRPC_SERVER DEBUG: [0x51b00035c980] received request Name# SplitShard ok# false data# peer# 2024-11-21T23:20:20.997379Z node 5 :GRPC_SERVER DEBUG: [0x51b00035bb80] received request Name# StartStreamEncryption ok# false data# peer# 2024-11-21T23:20:20.997391Z node 5 :GRPC_SERVER DEBUG: [0x51b000356e80] received request Name# StopStreamEncryption ok# false data# peer# 2024-11-21T23:20:20.997607Z node 5 :GRPC_SERVER DEBUG: [0x51b0002f3280] received request Name# SelfCheck ok# false data# peer# 2024-11-21T23:20:20.997616Z node 5 :GRPC_SERVER DEBUG: [0x51b000245e80] received request Name# NodeCheck ok# false data# peer# 2024-11-21T23:20:20.997826Z node 5 :GRPC_SERVER DEBUG: [0x51b000285580] received request Name# CreateSession ok# false data# peer# 2024-11-21T23:20:20.997827Z node 5 :GRPC_SERVER DEBUG: [0x51b0000bbb80] received request Name# DeleteSession ok# false data# peer# 2024-11-21T23:20:20.998049Z node 5 :GRPC_SERVER DEBUG: [0x51b000296d80] received request Name# AttachSession ok# false data# peer# 2024-11-21T23:20:20.998065Z node 5 :GRPC_SERVER DEBUG: [0x51b000297b80] received request Name# BeginTransaction ok# false data# peer# 2024-11-21T23:20:20.998247Z node 5 :GRPC_SERVER DEBUG: [0x51b000059b80] received request Name# CommitTransaction ok# false data# peer# 2024-11-21T23:20:20.998279Z node 5 :GRPC_SERVER DEBUG: [0x51b00005a980] received request Name# RollbackTransaction ok# false data# peer# 2024-11-21T23:20:20.998554Z node 5 :GRPC_SERVER DEBUG: [0x51b0002df080] received request Name# ExecuteQuery ok# false data# peer# 2024-11-21T23:20:20.998782Z node 5 :GRPC_SERVER DEBUG: [0x51b0002e4480] received request Name# ExecuteScript ok# false data# peer# 2024-11-21T23:20:20.998994Z node 5 :GRPC_SERVER DEBUG: [0x51b0000bc280] received request Name# FetchScriptResults ok# false data# peer# 2024-11-21T23:20:20.999196Z node 5 :GRPC_SERVER DEBUG: [0x51b00029ac80] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2024-11-21T23:20:20.999403Z node 5 :GRPC_SERVER DEBUG: [0x51b00029b380] received request Name# ChangeTabletSchema ok# false data# peer# 2024-11-21T23:20:20.999612Z node 5 :GRPC_SERVER DEBUG: [0x51b00029c880] received request Name# RestartTablet ok# false data# peer# 2024-11-21T23:20:20.999848Z node 5 :GRPC_SERVER DEBUG: [0x51b00029c180] received request Name# CreateLogStore ok# false data# peer# 2024-11-21T23:20:21.000094Z node 5 :GRPC_SERVER DEBUG: [0x51b00029e480] received request Name# DescribeLogStore ok# false data# peer# 2024-11-21T23:20:21.000313Z node 5 :GRPC_SERVER DEBUG: [0x51b0003ee480] received request Name# DropLogStore ok# false data# peer# 2024-11-21T23:20:21.000553Z node 5 :GRPC_SERVER DEBUG: [0x51b0003af480] received request Name# CreateLogTable ok# false data# peer# 2024-11-21T23:20:21.000551Z node 5 :GRPC_SERVER DEBUG: [0x51b0003ef280] received request Name# AlterLogStore ok# false data# peer# 2024-11-21T23:20:21.000770Z node 5 :GRPC_SERVER DEBUG: [0x51b0003f0080] 
received request Name# DescribeLogTable ok# false data# peer# 2024-11-21T23:20:21.000797Z node 5 :GRPC_SERVER DEBUG: [0x51b0003ed680] received request Name# DropLogTable ok# false data# peer# 2024-11-21T23:20:21.000993Z node 5 :GRPC_SERVER DEBUG: [0x51b0003edd80] received request Name# AlterLogTable ok# false data# peer# 2024-11-21T23:20:21.001005Z node 5 :GRPC_SERVER DEBUG: [0x51b0003ecf80] received request Name# Login ok# false data# peer# 2024-11-21T23:20:21.001208Z node 5 :GRPC_SERVER DEBUG: [0x51b0003b1780] received request Name# DescribeReplication ok# false data# peer# >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull >> KqpJoin::JoinWithDuplicates [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin >> KqpJoin::JoinAggregateSingleRow [GOOD] >> KqpJoin::JoinAggregate >> KqpFlipJoin::LeftSemi_1 [GOOD] >> KqpFlipJoin::LeftSemi_2 >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestAffectedConfigs >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] >> KqpJoin::JoinDupColumnRight >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] Test command err: BASE_PERF = 2.940146654 less-than assertion failed at ydb/core/viewer/viewer_ut.cpp:105, virtual void NTestSuiteViewer::TTestCaseTabletMerging::Execute_(NUnitTest::TTestContext &): timer.Passed() < 8 * BASE_PERF NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x17C68470) NTestSuiteViewer::TTestCaseTabletMerging::Execute_(NUnitTest::TTestContext&)+6081 (0x172FA5F1) std::__y1::__function::__func, void ()>::operator()()+280 (0x17396898) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+537 (0x17CA7369) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x17C6EFD9) NTestSuiteViewer::TCurrentTest::Execute()+1204 (0x17395644) NUnitTest::TTestFactory::Execute()+2438 (0x17C708A6) NUnitTest::RunMain(int, char**)+5149 (0x17CA0FAD) ??+0 (0x7F9093CF6D90) __libc_start_main+128 (0x7F9093CF6E40) _start+41 (0x153F9029) 2024-11-21T23:18:02.359785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1520:2384], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:02.360557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:02.361615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:18:02.362596Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1524:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:02.362886Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1518:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:02.363050Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1522:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:02.363352Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:02.363535Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:02.363872Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:18:02.363919Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:02.363995Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:18:02.364188Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:18:02.366086Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:1526:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:18:02.366789Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:18:02.367394Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:18:02.860909Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:03.023458Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:102} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2024-11-21T23:18:03.115923Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:406} Magic sector is present on disk, now going to format device PDiskId# 1000 2024-11-21T23:18:03.751395Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:346} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 1305, node 1 TClient is connected to server localhost:7079 2024-11-21T23:18:04.073321Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:04.073397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:04.073443Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:04.074023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:08.267081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:08.267241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:08.286492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:08.286621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:08.288723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:08.288815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:08.289099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:08.289162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:08.289519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:08.289581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:08.308388Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:08.308856Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:18:08.309060Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2024-11-21T23:18:08.309167Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2024-11-21T23:18:08.309442Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:08.309999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:08.310203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:08.310383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:08.310622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:53.978187Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439876068891118806:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:53.978298Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2024-11-21T23:18:54.173541Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:54.178111Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:54.178209Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:54.181163Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14959, node 6 2024-11-21T23:18:54.240520Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:18:54.240543Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:54.240554Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:54.240733Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:54.683826Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:18:54.754078Z node 6 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2024-11-21T23:18:58.176601Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876090365955922:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:58.176600Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876090365955937:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:58.176724Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:58.181652Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2024-11-21T23:18:58.198266Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439876090365955960:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2024-11-21T23:18:58.756803Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:18:58.978256Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439876068891118806:2062];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:58.978331Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TConsoleConfigTests::TestAffectedConfigs [GOOD] >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1+StreamLookupJoin-ColumnStore |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> KqpJoinOrder::TPCDS94-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231760.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132231760.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231760.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112231760.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230560.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112230560.000000s;Name=;Codec=}; 2024-11-21T23:19:23.090672Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:19:23.230289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:19:23.261408Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:19:23.261509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:19:23.261817Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:19:23.271128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:23.271379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:23.271701Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:23.271842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:23.271954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:19:23.272093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:19:23.272219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:19:23.272345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:19:23.272488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:19:23.272625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:19:23.272733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:19:23.272835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:19:23.298283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:19:23.298513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:19:23.315046Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:19:23.315404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:19:23.315468Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:19:23.315717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:23.315907Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:23.315997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:23.316045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:19:23.316138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:19:23.316245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:23.316293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:23.316326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:19:23.316496Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:23.316551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:23.316590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:23.316619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:19:23.316746Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:19:23.316800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:23.316841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:23.316868Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:19:23.316940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:23.316976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:23.317007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T23:19:23.317053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:23.317090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:23.317118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:19:23.317312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2024-11-21T23:19:23.317404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2024-11-21T23:19:23.317494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2024-11-21T23:19:23.317575Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2024-11-21T23:19:23.317738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:23.317790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:23.317822Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:19:23.318042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:19:23.318093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:19:23.318122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:19:23.318317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... 
ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:20:25.338697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] finished for tablet 9437184 2024-11-21T23:20:25.338762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] send ScanData to [1:685:2688] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:20:25.339209Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:686:2689] and sent to [1:685:2688] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.012},{"events":["l_bootstrap"],"t":0.023},{"events":["f_processing","f_task_result"],"t":0.024},{"events":["f_ack"],"t":0.208},{"events":["l_task_result"],"t":0.289},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.292}],"full":{"a":1732231225046595,"name":"_full_task","f":1732231225046595,"d_finished":0,"c":0,"l":1732231225338801,"d":292206},"events":[{"name":"bootstrap","f":1732231225046815,"d_finished":23204,"c":1,"l":1732231225070019,"d":23204},{"a":1732231225338442,"name":"ack","f":1732231225255289,"d_finished":2176,"c":3,"l":1732231225338336,"d":2535},{"a":1732231225338431,"name":"processing","f":1732231225071390,"d_finished":174256,"c":24,"l":1732231225338338,"d":174626},{"name":"ProduceResults","f":1732231225059461,"d_finished":7534,"c":29,"l":1732231225338684,"d":7534},{"a":1732231225338687,"name":"Finish","f":1732231225338687,"d_finished":0,"c":0,"l":1732231225338801,"d":114},{"name":"task_result","f":1732231225071408,"d_finished":171537,"c":21,"l":1732231225336082,"d":171537}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:686:2689]->[1:685:2688] 2024-11-21T23:20:25.339297Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:20:25.046081Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2024-11-21T23:20:25.339336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:20:25.339504Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.205463s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.336640s;size=0.002211992;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.017091s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.012938s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2024-11-21T23:20:25.339575Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:20:25.341338Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2024-11-21T23:20:25.341544Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2024-11-21T23:20:25.341676Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T23:20:25.341871Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T23:20:25.342011Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T23:20:25.342544Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:702:2705];trace_detailed=; 2024-11-21T23:20:25.342919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T23:20:25.343155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:20:25.343335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:20:25.343475Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:20:25.343711Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:20:25.343828Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:20:25.343959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:20:25.344000Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] finished for tablet 9437184 2024-11-21T23:20:25.344101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] send ScanData to [1:701:2704] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:20:25.344592Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:702:2705] and sent to [1:701:2704] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1732231225342472,"name":"_full_task","f":1732231225342472,"d_finished":0,"c":0,"l":1732231225344158,"d":1686},"events":[{"name":"bootstrap","f":1732231225342652,"d_finished":858,"c":1,"l":1732231225343510,"d":858},{"a":1732231225343688,"name":"ack","f":1732231225343688,"d_finished":0,"c":0,"l":1732231225344158,"d":470},{"a":1732231225343671,"name":"processing","f":1732231225343671,"d_finished":0,"c":0,"l":1732231225344158,"d":487},{"name":"ProduceResults","f":1732231225343262,"d_finished":492,"c":2,"l":1732231225343987,"d":492},{"a":1732231225343989,"name":"Finish","f":1732231225343989,"d_finished":0,"c":0,"l":1732231225344158,"d":169}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:702:2705]->[1:701:2704] 2024-11-21T23:20:25.344709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:20:25.342075Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T23:20:25.344765Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:20:25.344817Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T23:20:25.344927Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD] >> KqpStats::SysViewCancelled [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull >> KqpTypes::DyNumberCompare >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin-ColumnStore >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] >> KqpJoin::ComplexJoin >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |87.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2024-11-21T23:14:42.207612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:42.207681Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:42.303584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:43.439082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:43.439162Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:43.507600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:44.524485Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:44.524546Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:44.564540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:45.904782Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:45.904872Z node 4 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:45.967577Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:47.948765Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:47.948870Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:48.014491Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:49.260676Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:49.260749Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:49.310909Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:50.578360Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:50.578574Z node 7 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:50.686829Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:51.996053Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:51.996116Z node 8 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:52.044701Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:53.093538Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:53.093629Z node 9 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:53.198906Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:55.488814Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:55.488907Z node 10 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:55.572975Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:57.739186Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:57.739287Z node 11 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:57.790544Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:14:59.700773Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:14:59.700872Z node 12 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:14:59.765177Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:01.537108Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:01.537181Z node 13 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:01.603840Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:03.316459Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:03.316548Z node 14 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:03.376205Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:05.140132Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:05.140220Z node 15 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:05.193080Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:06.691169Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:06.691254Z node 16 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:06.742926Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:08.432940Z node 17 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:08.433024Z node 17 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:08.495977Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:10.445312Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:10.445394Z node 18 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:10.495485Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:10.932405Z node 18 :CMS_CONFIGS ERROR: Unexpected config sender died for subscription id=1 2024-11-21T23:15:11.562349Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:11.562464Z node 19 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:11.607472Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:12.287356Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:12.287471Z node 19 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:12.382389Z node 19 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2024-11-21T23:15:12.927110Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:12.927205Z node 20 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:12.967197Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:13.667850Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:13.667940Z node 20 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:13.762681Z node 20 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[20:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2024-11-21T23:15:14.336278Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:14.336409Z node 21 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:14.387970Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:17.293024Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:17.293127Z node 22 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:17.342334Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:19.946071Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:19.946138Z node 23 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:19.989590Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:21.046380Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:21.046468Z node 24 :IMPORT WARN: Table 
profiles were not loaded 2024-11-21T23:15:21.090121Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:22.146052Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:15:22.146125Z node 25 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:22.185492Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:15:27.089188Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:15:27.089300Z node 25 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:42.480772Z node 25 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2024-11-21T23:18:43.498424Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:18:43.498509Z node 26 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:18:43.544289Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:18:48.581447Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:48.581551Z node 26 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:23.547315Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:20:23.547435Z node 27 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:23.649612Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2024-11-21T23:20:25.911925Z node 28 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:20:25.912024Z node 28 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:25.956270Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> KqpIndexLookupJoin::LeftOnly+StreamLookup >> KqpFlipJoin::Right_3 [GOOD] >> KqpJoinOrder::TPCDS87-StreamLookupJoin-ColumnStore [GOOD] >> KqpIndexLookupJoin::MultiJoins [GOOD] >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup >> KqpIndexLookupJoin::CheckAllKeyTypesCast >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] >> KqpJoin::JoinLeftPureInner >> OlapEstimationRowsCorrectness::TPCH10 >> BasicUsage::RecreateObserver [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull |87.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup >> KqpJoin::JoinDupColumnRight [GOOD] >> KqpJoin::JoinDupColumnRightPure >> KqpJoinOrder::FiveWayJoinStatsOverride+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin+ColumnStore >> KqpFlipJoin::LeftSemi_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 >> KqpJoin::JoinAggregate [GOOD] >> KqpJoin::JoinConvert >> KqpJoinOrder::TPCH11+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2024-11-21T23:18:34.754468Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1732231114754438 2024-11-21T23:18:35.293162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875991573808878:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:35.293228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:35.385831Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875988088427115:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:35.628995Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001324/r3tmp/tmp5LvO68/pdisk_1.dat 2024-11-21T23:18:35.649121Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:18:35.672646Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:18:35.919541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:35.919670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:35.921204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:35.921247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:35.937190Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:18:35.937374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:35.938007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:35.971075Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30760, node 1 2024-11-21T23:18:36.041332Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:36.052777Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:18:36.209555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/001324/r3tmp/yandexMDiyzb.tmp 2024-11-21T23:18:36.209588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/dl5p/001324/r3tmp/yandexMDiyzb.tmp 2024-11-21T23:18:36.209770Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/001324/r3tmp/yandexMDiyzb.tmp 2024-11-21T23:18:36.209887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:18:36.368050Z INFO: TTestServer started on Port 21143 GrpcPort 30760 TClient is connected to server localhost:21143 PQClient connected to localhost:30760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:36.707501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2024-11-21T23:18:39.108876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876005268296469:2284], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.109046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876005268296447:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.109656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:39.116271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:18:39.130233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876005268296476:2285], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:18:39.481661Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876005268296519:2289], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:39.481985Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDU3MzY3MTctODUzZDViZC00NWRhMTdiNi04MmQ2YTYyMA==, ActorId: [2:7439876005268296445:2280], ActorState: ExecuteState, TraceId: 01jd8gag82akcqvm4v0c3xe7v3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:39.484975Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:39.492920Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876008753679118:2309], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:18:39.496502Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWJiNGVkNjMtMzY3YmEyN2EtYjEyZTI5MzktYTQ0ZmJjMzg=, ActorId: [1:7439876008753679068:2301], ActorState: ExecuteState, TraceId: 01jd8gagb03q6b45vaq0p5q77q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:18:39.497034Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:18:39.500465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.694314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:39.832794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:30760", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:18:40.170006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jd8gah35ft7s5qhf09ycyqj4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzcwODk0YmQtY2M5MjFmODQtODU1OTliNGUtODhkZDI4MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439876013048646814:2952] 2024-11-21T23:18:40.293951Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875991573808878:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:40.294034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:40.379534Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439875988088427115:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:40.379624Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2024-11-21T23:18:46.249358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:30760 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC 2024-11-21T23:18:46.396689Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC request to localhost:30760 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Local ... 135f-8702e592] Counters: { Errors: 0 CurrentSessionLifetimeMs: 75 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:20:31.223397Z :NOTICE: [/Root] [/Root] [4fb1273e-ff62fa9a-8443135f-8702e592] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:20:31.223423Z :DEBUG: [/Root] [/Root] [4fb1273e-ff62fa9a-8443135f-8702e592] [] Abort session to cluster 2024-11-21T23:20:31.223717Z :INFO: [/Root] [/Root] [7820fc23-f5db1b4a-e8ff5f46-df8ffa99] Closing read session. Close timeout: 0.000000s 2024-11-21T23:20:31.223751Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:20:31.223782Z :INFO: [/Root] [/Root] [7820fc23-f5db1b4a-e8ff5f46-df8ffa99] Counters: { Errors: 0 CurrentSessionLifetimeMs: 75 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:20:31.223831Z :NOTICE: [/Root] [/Root] [7820fc23-f5db1b4a-e8ff5f46-df8ffa99] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2024-11-21T23:20:31.223856Z :DEBUG: [/Root] [/Root] [7820fc23-f5db1b4a-e8ff5f46-df8ffa99] [] Abort session to cluster 2024-11-21T23:20:31.223287Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_4921293510004579211_v1 grpc read done: success# 0, data# { } 2024-11-21T23:20:31.223315Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_4921293510004579211_v1 grpc read failed 2024-11-21T23:20:31.223344Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_4921293510004579211_v1 grpc closed 2024-11-21T23:20:31.223373Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_4921293510004579211_v1 is DEAD 2024-11-21T23:20:31.226168Z :INFO: [/Root] [/Root] [7820fc23-f5db1b4a-e8ff5f46-df8ffa99] Closing read session. Close timeout: 0.000000s 2024-11-21T23:20:31.226252Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:20:31.226318Z :INFO: [/Root] [/Root] [7820fc23-f5db1b4a-e8ff5f46-df8ffa99] Counters: { Errors: 0 CurrentSessionLifetimeMs: 77 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:20:31.226452Z :NOTICE: [/Root] [/Root] [7820fc23-f5db1b4a-e8ff5f46-df8ffa99] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:20:31.226612Z :INFO: [/Root] [/Root] [4fb1273e-ff62fa9a-8443135f-8702e592] Closing read session. Close timeout: 0.000000s 2024-11-21T23:20:31.226649Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2024-11-21T23:20:31.226675Z :INFO: [/Root] [/Root] [4fb1273e-ff62fa9a-8443135f-8702e592] Counters: { Errors: 0 CurrentSessionLifetimeMs: 79 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:20:31.226733Z :NOTICE: [/Root] [/Root] [4fb1273e-ff62fa9a-8443135f-8702e592] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:20:31.226799Z :INFO: [/Root] [/Root] [bb280de2-bf86db21-aaaa839b-236b4b2] Closing read session. Close timeout: 0.000000s 2024-11-21T23:20:31.226826Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2024-11-21T23:20:31.226863Z :INFO: [/Root] [/Root] [bb280de2-bf86db21-aaaa839b-236b4b2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 85 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:20:31.226908Z :NOTICE: [/Root] [/Root] [bb280de2-bf86db21-aaaa839b-236b4b2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2024-11-21T23:20:31.224357Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_17590970474674955313_v1 grpc read done: success# 0, data# { } 2024-11-21T23:20:31.224374Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_17590970474674955313_v1 grpc read failed 2024-11-21T23:20:31.225770Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:20:31.224394Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_17590970474674955313_v1 grpc closed 2024-11-21T23:20:31.224427Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_17590970474674955313_v1 is DEAD 2024-11-21T23:20:31.225196Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876488294997677:2509] disconnected; active server actors: 1 2024-11-21T23:20:31.225224Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876488294997677:2509] client user disconnected session shared/user_3_3_4921293510004579211_v1 2024-11-21T23:20:31.225306Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2024-11-21T23:20:31.225353Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876488294997676:2508] disconnected; active server actors: 1 2024-11-21T23:20:31.225367Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876488294997676:2508] client user disconnected session shared/user_3_2_17590970474674955313_v1 2024-11-21T23:20:31.225436Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2024-11-21T23:20:31.225810Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_2_17590970474674955313_v1 2024-11-21T23:20:31.225848Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876488294997682:2516] destroyed 2024-11-21T23:20:31.225499Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_3_1_9051044003130830371_v1" (Sender=[3:7439876488294997664:2507], Pipe=[3:7439876488294997675:2507], Partitions=[], ActiveFamilyCount=0) 2024-11-21T23:20:31.225552Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] consumer user family 1 status Active partitions [0] session "shared/user_3_1_9051044003130830371_v1" sender [3:7439876488294997664:2507] lock partition 0 for ReadingSession "shared/user_3_1_9051044003130830371_v1" (Sender=[3:7439876488294997664:2507], Pipe=[3:7439876488294997675:2507], Partitions=[], ActiveFamilyCount=1) generation 1 step 2 2024-11-21T23:20:31.225908Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_2_17590970474674955313_v1 2024-11-21T23:20:31.225595Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2024-11-21T23:20:31.229252Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerConnected 2024-11-21T23:20:31.225621Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000163s 2024-11-21T23:20:31.229291Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7439876488294997687:2518], now have 1 active actors on pipe 2024-11-21T23:20:31.226518Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9051044003130830371_v1 assign: record# { Partition: 0 TabletId: 72075186224037892 Topic: "rt3.dc1--test-topic" Generation: 1 Step: 2 Session: "shared/user_3_1_9051044003130830371_v1" ClientId: "user" PipeClient { RawX1: 7439876488294997675 RawX2: 4503612512274891 } Path: "/Root/PQ/rt3.dc1--test-topic" } 2024-11-21T23:20:31.233070Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:20:31.228488Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9051044003130830371_v1 INITING TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2024-11-21T23:20:31.233099Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:20:31.233130Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Created session shared/user_3_1_9051044003130830371_v1 on pipe: [3:7439876488294997687:2518] 2024-11-21T23:20:31.232854Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9051044003130830371_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037892 Generation: 1 2024-11-21T23:20:31.233190Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_3_1_9051044003130830371_v1:1 with generation 1 2024-11-21T23:20:31.235005Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_9051044003130830371_v1 grpc read done: success# 0, data# { } 2024-11-21T23:20:31.233315Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user session is set to 0 (startOffset 0) session shared/user_3_1_9051044003130830371_v1 2024-11-21T23:20:31.235032Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9051044003130830371_v1 grpc read failed 2024-11-21T23:20:31.233448Z node 4 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2024-11-21T23:20:31.235052Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9051044003130830371_v1 grpc closed 2024-11-21T23:20:31.235085Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9051044003130830371_v1 is DEAD 2024-11-21T23:20:31.241082Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876488294997675:2507] disconnected; active server actors: 1 2024-11-21T23:20:31.240068Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:20:31.240101Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_9051044003130830371_v1 2024-11-21T23:20:31.240139Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876488294997687:2518] destroyed 2024-11-21T23:20:31.240241Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_9051044003130830371_v1 2024-11-21T23:20:31.241115Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7439876488294997675:2507] client user disconnected session shared/user_3_1_9051044003130830371_v1 2024-11-21T23:20:31.257803Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 2024-11-21T23:20:31.257889Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 >> KqpJoinOrder::CanonizedJoinOrderTPCH2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin+ColumnStore >> KqpJoinOrder::FiveWayJoin+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin+ColumnStore |87.4%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTypes::DyNumberCompare [GOOD] |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats >> KqpJoinOrder::TestJoinHint1+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCDS34-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] >> KqpJoinOrder::TPCDS9-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS9+StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftOnly-StreamLookup |87.4%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD] Test command err: 2024-11-21T23:15:33.309173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:295:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:15:33.316738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:33.316997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002b10/r3tmp/tmpp4wUYo/pdisk_1.dat 2024-11-21T23:15:33.807233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:15:33.852826Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:33.903033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:33.903172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:33.918736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:34.044455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:15:34.080367Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:15:34.081586Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:15:34.082105Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:632:2537] 2024-11-21T23:15:34.082639Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:15:34.126195Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:623:2531], Recipient [1:632:2537]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:15:34.126822Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:15:34.126902Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2024-11-21T23:15:34.128250Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2024-11-21T23:15:34.128319Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2024-11-21T23:15:34.128372Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2024-11-21T23:15:34.128672Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2024-11-21T23:15:34.180726Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2024-11-21T23:15:34.180954Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2024-11-21T23:15:34.181065Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:648:2546] 2024-11-21T23:15:34.181118Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2024-11-21T23:15:34.181177Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2024-11-21T23:15:34.181222Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.181714Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.181796Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.182326Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2024-11-21T23:15:34.182461Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2024-11-21T23:15:34.182613Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:637:2539], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.182650Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.182692Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:628:2534], serverId# [1:637:2539], sessionId# [0:0:0] 2024-11-21T23:15:34.182755Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.182821Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:15:34.182874Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.182913Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2024-11-21T23:15:34.182946Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2024-11-21T23:15:34.182981Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2024-11-21T23:15:34.183031Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2024-11-21T23:15:34.183192Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:380:2375], Recipient [1:637:2539] 2024-11-21T23:15:34.183240Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2024-11-21T23:15:34.183368Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2024-11-21T23:15:34.183623Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2024-11-21T23:15:34.183664Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2024-11-21T23:15:34.183761Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2024-11-21T23:15:34.183848Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2024-11-21T23:15:34.183892Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2024-11-21T23:15:34.183923Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2024-11-21T23:15:34.183973Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.184196Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2024-11-21T23:15:34.184230Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2024-11-21T23:15:34.184280Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2024-11-21T23:15:34.184317Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.184375Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2024-11-21T23:15:34.184404Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2024-11-21T23:15:34.184443Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2024-11-21T23:15:34.184492Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.184519Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2024-11-21T23:15:34.185770Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:649:2547], Recipient [1:632:2537]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2024-11-21T23:15:34.185823Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2024-11-21T23:15:34.197781Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2024-11-21T23:15:34.197864Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2024-11-21T23:15:34.197958Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2024-11-21T23:15:34.198011Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2024-11-21T23:15:34.198111Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2024-11-21T23:15:34.381993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:668:2560], Recipient [1:632:2537]: NKikimr::TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.382050Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2024-11-21T23:15:34.382079Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2558], serverId# [1:668:2560], sessionId# [0:0:0] 2024-11-21T23:15:34.382153Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:531:2462], Recipient [1:632:2537]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2024-11-21T23:15:34.382173Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2024-11-21T23:15:34.382271Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2024-11-21T23:15:34.382317Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2024-11-21T23:15:34.382357Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2024-11-21T23:15:34.382415Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 
2024-11-21T23:15:34.386955Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 518 RawX2: 4294969752 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2024-11-21T23:15:34.387032Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2024-11-21T23:15:34.387651Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:632:2537], Recipient [1:632:2537]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.387687Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2024-11-21T23:15:34.387730Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2024-11-21T23:15:34.387771Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:15:34.387803Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2024-11-21T23:15:34.387845Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:28147497 ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:20:09.823287Z node 17 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:09.881239Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:09.881499Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:09.894978Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:20:10.042931Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:20:10.450325Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:706:2589], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:10.451028Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:716:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:10.451162Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:10.458449Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:20:10.711394Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:720:2597], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:20:10.999478Z node 17 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8gd9efff8myqpt099x69qr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=17&id=ODk5ZmZjNjYtODMxYWZiMzUtZDkyZGRmNjItMmQ3YmYzYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:16.837538Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:90:2136], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:20:16.837822Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2024-11-21T23:20:16.837929Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002b10/r3tmp/tmpM8H9AJ/pdisk_1.dat 2024-11-21T23:20:17.274082Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:20:17.316980Z node 18 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:17.369906Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:17.370136Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:17.382371Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:20:17.511817Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:20:17.874513Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:703:2587], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:17.874669Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:714:2592], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:17.874799Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:17.881565Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2024-11-21T23:20:18.123762Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [18:717:2595], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2024-11-21T23:20:18.817483Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jd8gdgpf3534tft9zp7be8tv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=Nzk5NWJlZWYtNTdiMDZlYmUtODM5NzUxYWItZTUyOWJlYzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:19.400303Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jd8gdhsj2kpf7d4sxzkczkfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YWQ1ODIxMC1hOGU1MWIzMC0xMWQ3YTUyMi1kMGFmODg4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:19.998971Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8gdj9aapq4392dad7xws4j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MzM4ZjI5Yy0yY2MxY2I4NC01OGE4MGM3Ny1jYTY5NDFhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:20.674299Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jd8gdjtg4g340697hfq37gdd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OGUwZDU4MmQtY2RhMDY2YmItZTFiNTEzNjAtYjNjOTY4Y2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:21.264348Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jd8gdkh48saxnstkhc06ysnp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OTI0YzMwNi04OTNiMDk4ZS04YmNlNWI5Yy03OTA1ZmI0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:21.835403Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jd8gdm1zbbmbk4rzfjncz898, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MThhNTIxY2UtZjY1YWI4ZjEtYTAzNGU3OTAtY2YxMDRjN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:22.282339Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jd8gdmks1qy26hxjwnpxnf2z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YzQ0YzAzYWItZmYzYWEwYjgtYzBhNzNkMTMtZmUyYjA5MGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:22.771256Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jd8gdn1zejbgmkcs9ky15zsh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ZDYwMThlNjItNmFhM2UxMWItOGI2N2Y5MjgtM2E3MTFjODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:23.227387Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jd8gdngsf7861azpss3gp06y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YTU3NTY2ZTQtMjkwNGRiMGYtYTEzMjgxMy1kODczNjU0OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2024-11-21T23:20:23.695057Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jd8gdnz102ktx2gxyhnn7yrt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MmZmMTVjOTctMWRhZmI3MzYtYmI2YTFhMGUtODUxZGE4MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
waiting for stats after upsert 2024-11-21T23:20:27.484913Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:20:27.485013Z node 18 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487152 RowCount: 10 IndexSize: 0 InMemSize: 10487152 LastAccessTime: 1506 LastUpdateTime: 1506 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 13562 Memory: 17425232 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487152 RowCount: 10 IndexSize: 0 InMemSize: 10487152 LastAccessTime: 1506 LastUpdateTime: 1506 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 7985 Memory: 124716 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1506 LastUpdateTime: 1506 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 7985 Memory: 124716 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2024-11-21T23:20:35.136458Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jd8ge1321zkk26xm8qdj9zp1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=ODEwNjFkZjYtYWVhZWRjYjYtMTJjYTJmZjUtZTFhOTZiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 25861, MsgBus: 17838 2024-11-21T23:19:54.643959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876330638474628:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:54.644277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f4e/r3tmp/tmpAtbfTe/pdisk_1.dat 2024-11-21T23:19:55.217146Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:55.223242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:55.223356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:55.225676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25861, node 1 2024-11-21T23:19:55.294598Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:55.294615Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:55.294623Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:55.294699Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17838 TClient is connected to server localhost:17838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:55.883496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:55.902598Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:55.920541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.076749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:56.226806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:56.300550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:58.171153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876347818345379:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.179660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.209259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.237802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.310605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.337040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.381594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.433474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.488523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876347818345875:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.488614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.488838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876347818345880:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.492154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:58.500672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876347818345882:2431], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:59.564168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.601528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.642462Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876330638474628:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:59.643226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:59.650624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.730482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.800967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.837783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31673, MsgBus: 17379 2024-11-21T23:20:02.268523Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876363663890320:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:02.268572Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f4e/r3tmp/tmpcj92y1/pdisk_1.dat 2024-11-21T23:20:02.394557Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:02.416107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:02.416197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:02.421245Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31673, node 2 2024-11-21T23:20:02.519104Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:02.519136Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:02.519143Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:02.519269Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17379 TClient is connected to server localhost:17379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:03.027101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:24.546809Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876458725078009:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:24.546933Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:24.547255Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876458725078014:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:24.551719Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:24.589227Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876458725078016:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:25.999406Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.054901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.146355Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.282320Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.346191Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.404403Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14233, MsgBus: 24103 2024-11-21T23:20:29.135228Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876478776466569:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:29.135284Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f4e/r3tmp/tmp3qDi2v/pdisk_1.dat 2024-11-21T23:20:29.357614Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:29.365738Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:29.365862Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:29.368216Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14233, node 5 2024-11-21T23:20:29.559018Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:29.559061Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:29.559074Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:29.559251Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24103 TClient is connected to server localhost:24103 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:20:30.327517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.348635Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:30.360738Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:30.448631Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:30.634290Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:30.736368Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:33.598108Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876495956337448:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:33.598305Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:33.686528Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:33.732318Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:33.776259Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:33.828589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:33.889810Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:33.978130Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.074071Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876500251305246:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.074254Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.074653Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876500251305251:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.080105Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:34.102789Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876500251305253:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:34.138721Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876478776466569:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:34.138795Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:36.084710Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.145320Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.199204Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.246764Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.321087Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.376763Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2024-11-21T23:19:37.608813Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:19:37.814011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:19:37.840130Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:19:37.840246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:19:37.840548Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:19:37.849918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:37.850170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:37.850757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:37.850895Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:37.851017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:19:37.851212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:19:37.851365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:19:37.851494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:19:37.851650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:19:37.851765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:19:37.851894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:19:37.852026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:19:37.876371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:19:37.881266Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:19:37.881550Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:19:37.881609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:19:37.881821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:37.882014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:37.882105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:37.882168Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:19:37.882284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:19:37.882366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:37.882438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:37.882477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:19:37.882677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:37.882744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:37.882793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:37.882840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:19:37.882937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:19:37.882995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:37.883068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:37.883103Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:19:37.883174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:37.883210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:37.883241Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:19:37.883301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:37.883362Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:37.883390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:19:37.883584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2024-11-21T23:19:37.883687Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2024-11-21T23:19:37.883799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2024-11-21T23:19:37.883993Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=133; 2024-11-21T23:19:37.884160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:37.884221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:37.884263Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:19:37.884473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:19:37.884519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:19:37.884555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:19:37.884715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:19:37.884766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:19:37.884801Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:19:37.885010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:19:37.885068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:19:37.885102Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2024-11-21T23:19:37.885245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=nor ... Batch GenStep: 2:87 Blob count: 1 2024-11-21T23:20:37.943592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:740;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=6265200;indexing_debug={task_ids=3791a354-a85f11ef-8338612c-cf913112,;}; 2024-11-21T23:20:37.943709Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=61;external_task_id=36f08a82-a85f11ef-b328a154-fd120536;mem=19104780;cpu=0; 2024-11-21T23:20:37.944240Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[204] complete at tablet 9437184 2024-11-21T23:20:37.944371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:740;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=6265200;indexing_debug={task_ids=3791a354-a85f11ef-8338612c-cf913112,;}; 2024-11-21T23:20:37.944507Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=6364460;external_task_id=3791a354-a85f11ef-8338612c-cf913112;type=CS::INDEXATION;priority=0;; 2024-11-21T23:20:37.944776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=1;fline=storage.cpp:86;event=granule_compaction_weight;priority=(10,19996862644); 2024-11-21T23:20:37.944873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=1;fline=optimizer.h:893;stop_instant=NO_VALUE_OPTIONAL;size=6274712;next=;count=2;info={bytes=3137356;count=1;records=53332};event=start_optimization;stop_point=;main_portion=88; 2024-11-21T23:20:37.945103Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=manager.cpp:9;event=lock;process_id=CS::GENERAL::3791fe76-a85f11ef-8a9a43ee-a19688cc; 2024-11-21T23:20:37.945142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=ro_controller.cpp:45;event=CS::GENERAL;tablet_id=9437184; 2024-11-21T23:20:37.945266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=19104780;external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc;type=CS::GENERAL;priority=0;; 2024-11-21T23:20:37.945613Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=62;task=cpu=0;mem=6364460;external_task_id=3791a354-a85f11ef-8338612c-cf913112;type=CS::INDEXATION;priority=0;; 2024-11-21T23:20:37.945650Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=3791a354-a85f11ef-8338612c-cf913112;mem=6364460;cpu=0; 2024-11-21T23:20:37.945690Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=3791a354-a85f11ef-8338612c-cf913112;task_id=62;mem=6364460;cpu=0; 2024-11-21T23:20:37.945809Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3791a354-a85f11ef-8338612c-cf913112;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=3791a354-a85f11ef-8338612c-cf913112; 2024-11-21T23:20:38.728590Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3791a354-a85f11ef-8338612c-cf913112;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2024-11-21T23:20:38.728889Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2024-11-21T23:20:38.729519Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=63;task=cpu=0;mem=19104780;external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc;type=CS::GENERAL;priority=0;; 2024-11-21T23:20:38.729564Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc;mem=19104780;cpu=0; 2024-11-21T23:20:38.729603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:136:2168];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc;task_id=63;mem=19104780;cpu=0; 2024-11-21T23:20:38.729697Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=manager.h:99;event=ask_data;request=request_id=149;1={portions_count=2};; 2024-11-21T23:20:38.730167Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=columnshard_impl.cpp:852;event=compaction;external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc; 2024-11-21T23:20:38.730244Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:152:2180];tablet_id=9437184;parent=[1:136:2168];fline=columnshard_impl.cpp:615;event=start_changes;type=CS::GENERAL;task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc; 2024-11-21T23:20:38.731262Z node 1 :S3_WRAPPER DEBUG: external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc;fline=fake_storage.cpp:90;method=GetObject;id=[9437184:2:61:255:1:823712:0];range=bytes=0-823711;object_exists=1; 2024-11-21T23:20:38.733696Z node 1 :S3_WRAPPER DEBUG: external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc;fline=fake_storage.cpp:90;method=GetObject;id=[9437184:2:60:255:2:823712:0];range=bytes=0-823711;object_exists=1; 2024-11-21T23:20:38.738061Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc;fline=actor.cpp:48;task=agents_waiting=2;additional_info=();; 2024-11-21T23:20:38.797503Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=3791fe76-a85f11ef-8a9a43ee-a19688cc; 2024-11-21T23:20:39.659074Z node 1 :TX_COLUMNSHARD DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:136:2168];fline=general_compaction.cpp:203;event=blobs_created_diff;appended=0;;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:0:103496];;column_id:4;chunk_idx:1;blob_range:[NO_BLOB:103496:103456];;column_id:4;chunk_idx:2;blob_range:[NO_BLOB:206952:103392];;column_id:4;chunk_idx:3;blob_range:[NO_BLOB:310344:101512];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:411856:103496];;column_id:6;chunk_idx:1;blob_range:[NO_BLOB:515352:103456];;column_id:6;chunk_idx:2;blob_range:[NO_BLOB:618808:103392];;column_id:6;chunk_idx:3;blob_range:[NO_BLOB:722200:101512];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:0:156904];;column_id:3;chunk_idx:1;blob_range:[NO_BLOB:156904:156864];;column_id:3;chunk_idx:2;blob_range:[NO_BLOB:313768:156800];;column_id:3;chunk_idx:3;blob_range:[NO_BLOB:470568:154904];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:625472:103496];;column_id:2;chunk_idx:1;blob_range:[NO_BLOB:728968:103456];;column_id:2;chunk_idx:2;blob_range:[NO_BLOB:832424:103392];;column_id:2;chunk_idx:3;blob_range:[NO_BLOB:935816:101512];;column_id:10;chunk_idx:0;blob_range:[NO_BLOB:1037328:103496];;column_id:10;chunk_idx:1;blob_range:[NO_BLOB:1140824:103456];;column_id:10;chunk_idx:2;blob_range:[NO_BLOB:1244280:103392];;column_id:10;chunk_idx:3;blob_range:[NO_BLOB:1347672:101512];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:1449184:53688];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:1502872:53688];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:1556560:53688];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:1610248:53672];;column_id:8;chunk_idx:0;blob_range:[NO_BLOB:1663920:53592];;column_id:8;chunk_idx:1;blob_range:[NO_BLOB:1717512:53608];;column_id:8;chunk_idx:2;blob_range:[NO_BLOB:1771120:53600];;column_id:8;chunk_idx:3;blob_range:[NO_BLOB:1824720:53592];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:1878312:53592];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:1931904:53608];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:1985512:53600];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:2039112:53592];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:2092704:53520];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:2146224:53216];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:2199440:53288];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:2252728:53240];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:2305968:640];;column_id:4294967040;chunk_idx:1;blob_range:[NO_BLOB:2306608:640];;column_id:4294967040;chunk_idx:2;blob_range:[NO_BLOB:2307248:640];;column_id:4294967040;chunk_idx:3;blob_range:[NO_BLOB:2307888:640];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:2308528:640];;column_id:4294967041;chunk_idx:1;blob_range:[NO_BLOB:2309168:640];;column_id:4294967041;chunk_idx:2;blob_range:[NO_BLOB:2309808:640];;column_id:4294967041;chunk_idx:3;blob_range:[NO_BLOB:2310448:640];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:2311088:632];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:2311720:632];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:2312352:632];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:2312984:632];;;;switched=(portion_id:87;path_id:1;records_count:53332;min_schema_snapshot:(plan_step=101;tx_id=101;);schema_version:1;level:0;column_size:3137328;index_size:28;meta:((produced=INSERTED;)););(portion_id:88;path_id:1;records_count:53332;min_schema_snapshot:(plan_step=101;tx_id=101;);schema_version:1;level:0;column_size:3137328;index_size:28;meta:((produced=SPLIT_COMPACTED;)););; 2024-11-21T23:20:39.659163Z node 1 :TX_COLUMNSHARD INFO: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:136:2168];fline=general_compaction.cpp:205;event=blobs_created;appended=1;switched=2; 2024-11-21T23:20:39.659428Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'hot' stopped at tablet 9437184 >> KqpJoin::JoinLeftPureInner [GOOD] >> KqpJoin::JoinLeftPureInnerConverted >> KqpJoin::ComplexJoin [GOOD] >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin-ColumnStore >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] >> KqpJoin::JoinDupColumnRightPure [GOOD] >> KqpJoin::JoinLeftPureFull >> KqpJoinOrder::TPCDS16-StreamLookupJoin-ColumnStore >> KqpJoin::RightSemiJoin_FullScan >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ComplexJoin [GOOD] Test command err: Trying to start YDB, gRPC: 27203, MsgBus: 25827 2024-11-21T23:19:53.277947Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876327107498561:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:53.278050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f56/r3tmp/tmpjQQyKh/pdisk_1.dat 2024-11-21T23:19:53.667150Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:53.700844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2024-11-21T23:19:53.701002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27203, node 1 2024-11-21T23:19:53.705263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:53.768568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:53.768595Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:53.768608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:53.768716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25827 TClient is connected to server localhost:25827 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:54.376092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:54.429280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:54.582107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:54.762883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:54.848379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:56.971496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876339992402168:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.971659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:57.291753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.335657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.378430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.416199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.444562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.484260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.584112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876344287369963:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:57.584174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:57.584296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876344287369968:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:57.587344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:57.595335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876344287369970:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:58.279133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876327107498561:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:58.279208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:58.799438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.843231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.887734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2844, MsgBus: 22809 2024-11-21T23:20:00.628808Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876356611303857:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:00.629770Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f56/r3tmp/tmpTIkkIA/pdisk_1.dat 2024-11-21T23:20:00.872099Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2844, node 2 2024-11-21T23:20:00.938760Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:00.938783Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:00.938790Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:00.938880Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:20:00.947697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:00.947786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:00.951086Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22809 TClient is connected to server localhost:22809 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:01.439951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:01.461721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:01.548096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:01.733889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiti ... ropose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:20.073524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876443039550560:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.073638Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.122873Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.203107Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.285869Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.327692Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.374992Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.416600Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.494069Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876443039551059:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.494151Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.494384Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876443039551064:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.499464Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:20.510198Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876443039551066:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:20.676020Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439876421564712545:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:20.676079Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13091, MsgBus: 3783 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f56/r3tmp/tmpHJFJ8p/pdisk_1.dat 2024-11-21T23:20:30.110288Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:20:30.116597Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:30.148093Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:30.148205Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:30.151651Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13091, node 5 2024-11-21T23:20:30.259029Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:30.259057Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:30.259069Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:30.259184Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3783 TClient is connected to server localhost:3783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2024-11-21T23:20:30.884312Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:30.896859Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2024-11-21T23:20:31.005378Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:20:31.237201Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:31.310894Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:34.507844Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876502846646353:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.507960Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.553419Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.600381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.687722Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.800379Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.905249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:35.080763Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:35.176130Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876507141614161:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:35.176245Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:35.176501Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876507141614166:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:35.184253Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:35.211435Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876507141614168:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:37.435397Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.515220Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.557001Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.618302Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.669835Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10630, MsgBus: 22343 2024-11-21T23:19:55.659114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876335331753911:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:55.659265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f4a/r3tmp/tmpoaeS6Y/pdisk_1.dat 2024-11-21T23:19:56.304886Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:56.319506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:56.319686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:56.332464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10630, node 1 2024-11-21T23:19:56.501019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:56.501042Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:56.501050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:56.501141Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22343 TClient is connected to server localhost:22343 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:57.264212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:19:57.317302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.600197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:57.808537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:57.894313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:59.930562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876352511624806:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:59.930689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:00.254293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:00.332551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:00.401518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:00.432427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:00.472547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:00.509716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:00.620170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876356806592605:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:00.620271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:00.620532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876356806592610:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:00.624816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:00.639365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876356806592612:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:00.661326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876335331753911:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:00.661403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:01.899269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.949273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:01.989967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.052658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.082218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.115779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28499, MsgBus: 9750 2024-11-21T23:20:04.034575Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876371209293460:2054];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:04.034619Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f4a/r3tmp/tmpjcg8ew/pdisk_1.dat 2024-11-21T23:20:04.295986Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:04.308142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:04.308231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:04.309864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28499, node 2 2024-11-21T23:20:04.410926Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:04.410955Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:04.410963Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:04.411082Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9750 TClient is connected to server localhost:9750 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:04.862332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:04.872163Z node 2 :FLA ... f is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.376582Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876466766699206:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:26.376670Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:26.376950Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876466766699211:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:26.381901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:26.401042Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876466766699213:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:27.901619Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:27.977631Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.016743Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.103138Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.174971Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.225892Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9197, MsgBus: 7652 2024-11-21T23:20:30.910720Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876483781237649:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f4a/r3tmp/tmpkBU4TI/pdisk_1.dat 2024-11-21T23:20:31.103983Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:20:31.309282Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:31.329593Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:31.329712Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:31.332876Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9197, node 5 2024-11-21T23:20:31.500591Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:31.500620Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:31.500629Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:31.500775Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7652 TClient is connected to server localhost:7652 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:32.438039Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:32.452570Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:32.467335Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:32.613780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:32.824899Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:32.947356Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:35.866845Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876483781237649:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:35.866925Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:36.482606Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876509551042969:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:36.482727Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:36.560035Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.654189Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.736948Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.858339Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.898211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.997150Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.106061Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876513846010773:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:37.106182Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:37.106585Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876513846010778:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:37.113768Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:37.129533Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876513846010780:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:38.693375Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.769824Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.817665Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.896563Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.977198Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:39.041118Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS95-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin-ColumnStore >> KqpJoin::JoinConvert [GOOD] |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |87.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |87.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::DyNumberCompare [GOOD] Test command err: Trying to start YDB, gRPC: 12439, MsgBus: 6272 2024-11-21T23:18:18.727619Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875918527134673:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:18.727660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0028dd/r3tmp/tmpRmsx0v/pdisk_1.dat 2024-11-21T23:18:19.506713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:18:19.506828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:18:19.515402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:18:19.571360Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12439, node 1 2024-11-21T23:18:19.858916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2024-11-21T23:18:19.858940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:18:19.858949Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:18:19.859042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6272 TClient is connected to server localhost:6272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:18:20.597672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.651071Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:18:20.664829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:20.853845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:21.139386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:21.237838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:23.735212Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875918527134673:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:18:23.735294Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:18:23.932701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875940001972639:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:23.954116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:24.364602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.428027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.490612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.581610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.620544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.676300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:18:24.781377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875944296940441:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:24.781498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:24.781742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875944296940447:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:18:24.785305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:18:24.796328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875944296940449:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:18:25.924886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:18:34.525552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:18:34.525593Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:04.392282Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231144352, txId: 281474976715672] shutting down 2024-11-21T23:19:04.454597Z node 1 :RPC_REQUEST WARN: Client lost 2024-11-21T23:19:05.681135Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231145670, txId: 281474976715674] shutting down 2024-11-21T23:19:06.934200Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231146915, txId: 281474976715676] shutting down 2024-11-21T23:19:08.309843Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231148296, txId: 281474976715678] shutting down 2024-11-21T23:19:09.783272Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231149732, txId: 281474976715680] shutting down 2024-11-21T23:19:11.194960Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231151178, txId: 281474976715682] shutting down 2024-11-21T23:19:12.441890Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231152434, txId: 281474976715684] shutting down 2024-11-21T23:19:13.827415Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231153803, txId: 281474976715686] shutting down 2024-11-21T23:19:15.062628Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231155054, txId: 281474976715688] shutting down 2024-11-21T23:19:16.527989Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231156474, txId: 281474976715690] shutting down 2024-11-21T23:19:17.984440Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231157953, txId: 281474976715692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:551, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1839528F 1. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:551: Execute_ @ 0x17AEEDE8 2. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17: operator() @ 0x17B01B67 3. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:344: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17:1) &> @ 0x17B01B67 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:419: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17:1) &> @ 0x17B01B67 5. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:195: operator() @ 0x17B01B67 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:366: operator() @ 0x17B01B67 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:519: operator() @ 0x183D4188 8. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:1170: operator() @ 0x183D4188 9. /-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x183D4188 10. /-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x1839BDF8 11. /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:17: Execute @ 0x17B00CEB 12. /-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x1839D6C5 13. /-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x183CDDCC 14. ??:0: ?? @ 0x7F4B34646D8F 15. ??:0: ?? @ 0x7F4B34646E3F 16. ??:0: ?? @ 0x1569B028 Trying to start YDB, gRPC: 28489, MsgBus: 17487 2024-11-21T23:19:24.055856Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:743987619 ... e, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:30.231979Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:30.466346Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:20:30.576427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2024-11-21T23:20:33.945017Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876497657440720:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:33.945426Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:33.997067Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.079203Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876480477569966:2153];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:34.080238Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:34.116297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.162322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.225464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.285359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.498106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.603552Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876501952408530:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.603662Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.603861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876501952408535:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.608560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:34.637887Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876501952408537:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:36.988968Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876510542343524:2474], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Double
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Double
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Double
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Double 2024-11-21T23:20:36.990682Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWIxMDA3ZWQtZjgyMGU1ZGQtNDFiODMzNGItNmYxNGM3Yzc=, ActorId: [3:7439876510542343471:2465], ActorState: ExecuteState, TraceId: 01jd8ge3aj6ag4180v16ers1x5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Double
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Double
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Double
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Double 2024-11-21T23:20:37.056707Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876514837310825:2476], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2024-11-21T23:20:37.057519Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWIxMDA3ZWQtZjgyMGU1ZGQtNDFiODMzNGItNmYxNGM3Yzc=, ActorId: [3:7439876510542343471:2465], ActorState: ExecuteState, TraceId: 01jd8ge3cbfwaem0473xbr9vgh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2024-11-21T23:20:37.115752Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7439876514837310832:2478], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2024-11-21T23:20:37.118903Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWIxMDA3ZWQtZjgyMGU1ZGQtNDFiODMzNGItNmYxNGM3Yzc=, ActorId: [3:7439876510542343471:2465], ActorState: ExecuteState, TraceId: 01jd8ge3eq4n3jsnzpjsphxdjz, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinConvert [GOOD] Test command err: Trying to start YDB, gRPC: 21514, MsgBus: 21854 2024-11-21T23:20:00.070983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876355056031379:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:00.071533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f40/r3tmp/tmpXmoftA/pdisk_1.dat 2024-11-21T23:20:00.728768Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:00.733203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:00.733328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:00.736167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21514, node 1 2024-11-21T23:20:01.011636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:01.011660Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:01.011667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:01.011766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21854 TClient is connected to server localhost:21854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:01.754497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:01.804671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:01.944255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2024-11-21T23:20:02.171145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:20:02.240696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:04.303446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876372235902130:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:04.303558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:04.575397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:04.618262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:04.699855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:04.726382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:04.767690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:04.801649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:04.873472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876372235902628:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:04.873556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:04.873762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876372235902633:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:04.877065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:04.886619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876372235902635:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:05.014133Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876355056031379:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:05.014205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:06.051755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:06.093922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:06.134370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26030, MsgBus: 19353 2024-11-21T23:20:07.907804Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876384459562564:2112];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:07.932019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f40/r3tmp/tmpgMqqGD/pdisk_1.dat 2024-11-21T23:20:08.021883Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26030, node 2 2024-11-21T23:20:08.059440Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:08.059520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:08.061626Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:20:08.110973Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:08.110996Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:08.111005Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:08.111107Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19353 TClient is connected to server localhost:19353 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:08.605532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:08.611244Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:20:08.620913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:08.704085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:08.842191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part pro ... ARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:29.632139Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:29.710442Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:29.753375Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439876457145057063:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:29.753715Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:29.791473Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:29.894237Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876478619895612:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:29.894351Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:29.894710Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876478619895617:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:29.904759Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:29.940018Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876478619895619:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:31.340213Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:31.402026Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:31.468801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8537, MsgBus: 24953 2024-11-21T23:20:35.509724Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876507543622279:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:35.509940Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f40/r3tmp/tmpJZSYpx/pdisk_1.dat 2024-11-21T23:20:35.824776Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:35.892473Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:35.892602Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:35.897718Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8537, node 5 2024-11-21T23:20:36.115081Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:36.115110Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:36.115116Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:36.115229Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24953 TClient is connected to server localhost:24953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T23:20:36.854683Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.863539Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:20:36.870609Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:36.971589Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:37.198484Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:37.310715Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:40.340534Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876529018460460:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:40.340621Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:40.396881Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:40.439928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:40.482820Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:40.515377Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876507543622279:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:40.515460Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:40.553549Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:40.593456Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:40.663322Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:40.742039Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876529018460960:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:40.742112Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:40.742454Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876529018460965:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:40.746563Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:40.758660Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876529018460967:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:42.285294Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:42.333560Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:42.383203Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:42.970080Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> KqpFlipJoin::LeftSemi_3 [GOOD] |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |87.5%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |87.5%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |87.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 27029, MsgBus: 26281 2024-11-21T23:19:52.290684Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876322076044466:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:52.290725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f59/r3tmp/tmp9Nw5Ev/pdisk_1.dat 2024-11-21T23:19:52.787174Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:52.806230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:52.806350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:52.808200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27029, node 1 2024-11-21T23:19:52.899111Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:52.899135Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:52.899142Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:52.899242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26281 TClient is connected to server localhost:26281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:53.504222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:53.537302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.681641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.880444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.973652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:56.107350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876334960948056:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.130680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.173375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.213103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.274958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.368015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.411822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.497295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.569505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876339255915855:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.569597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.569829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876339255915861:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.574502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:56.603090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876339255915863:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:57.291575Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876322076044466:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:57.291662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:57.715004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.795569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.831499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:19:57.872996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","PlanNodeId":3,"Columns":["Key","Value"],"E-Rows":"4","Table":"FJ_Table_1","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["FJ_Table_2"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1732231198594,"Host":"ghrun-uc25mmfz34","ResultRows":2,"ResultBytes":26,"OutputRows":2,"StartTimeMs":1732231198594,"IngressRows":2,"ComputeTimeUs":128,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":2,"DstStageId":0,"Bytes":26}],"WaitInputTimeUs":2432,"TaskId":1,"OutputBytes":26}],"PeakMemoryUsageBytes":65536,"CpuTimeUs":1505}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":26,"Max":26,"Min":26}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":2474,"Max":2474,"Min":2474},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576},"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1,"ResultBytes":{"Count":1,"Sum":26,"Max":26,"Min":26},"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1732231198591,"WaitInputTimeUs":{"Count":1,"Sum":2432,"Max":2432,"Min":2432},"OutputBytes":{"Count":1,"Sum":26,"Max":26,"Min":26},"CpuTimeUs":{"Count":1,"Sum":702,"Max":702,"Min":702},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64}},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":64,"Max":64,"Min":64},"WaitTimeUs":{"Count":1,"Sum":2448,"Max":2448,"Min":2448},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Collect","Stats":{"ComputeNodes":[{"Tasks":[{"InputBytes":26,"FinishTimeMs":1732231198603,"Host":"ghrun-uc25mmfz34","OutputRows":2,"StartTimeMs":1732231198594,"InputRows":2,"ComputeTimeUs":96,"InputChannels":[{"WaitTimeUs":2077,"ChannelId":1,"Rows":2,"SrcStageId":0,"Bytes":26}],"NodeId":1,"OutputChannels":[{"ChannelId":2,"Rows":2,"DstStageId":2,"Bytes":42}],"WaitInputT ... 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.137338Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.269758Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.346333Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.442500Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.571971Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876484386072142:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:30.572074Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:30.573142Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876484386072147:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:30.581521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:30.595037Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876484386072149:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:32.334297Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:32.381268Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:32.442487Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:32.513770Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18299, MsgBus: 62842 2024-11-21T23:20:35.559197Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439876504863305325:2074];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:35.608910Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f59/r3tmp/tmpKX6A6R/pdisk_1.dat 2024-11-21T23:20:35.919362Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:35.987427Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:35.987539Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:35.995652Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18299, node 6 2024-11-21T23:20:36.203053Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:36.203105Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:36.203115Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:36.203241Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62842 TClient is connected to server localhost:62842 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:20:37.192351Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:37.200086Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:20:37.213806Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:37.377708Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:37.664850Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:37.804002Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:40.562368Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439876504863305325:2074];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:40.562458Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:41.254582Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876530633110793:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:41.254708Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:41.374543Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:41.474384Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:41.580088Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:41.672070Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:41.744015Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:41.843427Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:41.972780Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876530633111302:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:41.972901Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:41.974095Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876530633111308:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:41.979737Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:41.998095Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439876530633111310:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:44.022767Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:44.095090Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:44.150321Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:44.261286Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |87.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemi >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |87.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull >> TColumnShardTestSchema::HotTiersTtl >> KqpJoin::JoinLeftPureFull [GOOD] >> KqpJoin::JoinLeftPureExclusion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH8-StreamLookupJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17554, MsgBus: 3554 2024-11-21T23:17:34.085465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875729617026323:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:34.085519Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f7b/r3tmp/tmp4WV23P/pdisk_1.dat 2024-11-21T23:17:34.598049Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:17:34.603621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:17:34.603723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:17:34.607901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17554, node 1 2024-11-21T23:17:34.732446Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:17:34.732476Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:17:34.732492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:17:34.732610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3554 TClient is connected to server localhost:3554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:17:35.415812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:35.442307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:35.583750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:35.759313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:17:35.829774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:17:37.715065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875742501929692:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:37.715219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:38.009656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.045875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.083323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.133490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.225911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.284790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:17:38.462274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875746796897493:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:38.462436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:38.462994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439875746796897499:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:17:38.467465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:17:38.483089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:17:38.483297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439875746796897501:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:17:39.090665Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439875729617026323:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:17:39.090730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:17:39.751308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.787835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.859916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.897349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:17:39.935798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.061913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.134673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.165058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.193472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.230996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.259986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.330473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.361037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:17:40.923793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T23:17:40.976886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.025681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.053808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.090075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T23:17:41.121833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: E ... 72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:20:29.050138Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:20:29.050190Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:20:29.050791Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:20:29.050848Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:20:29.051035Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:20:29.051074Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:20:29.051319Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:20:29.051358Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:20:29.051495Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:20:29.051531Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038690;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:20:29.058105Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:20:29.058182Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:20:29.058301Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:20:29.058338Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:20:29.058566Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:20:29.058605Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:20:29.058719Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:20:29.058763Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:20:29.058848Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:20:29.058882Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:20:29.058933Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:20:29.058975Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:20:29.059387Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:20:29.059480Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:20:29.059746Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:20:29.059785Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:20:29.059968Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:20:29.060002Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:20:29.060213Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:20:29.060248Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:20:29.060378Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:20:29.060408Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038698;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2024-11-21T23:20:29.076930Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:20:29.077007Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:20:29.077127Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:20:29.077170Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:20:29.077377Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:20:29.077415Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:20:29.077526Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:20:29.077566Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:20:29.077642Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:20:29.077676Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:20:29.077727Z node 5 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:20:29.077764Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:20:29.078181Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:20:29.078226Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:20:29.078845Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:20:29.078898Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:20:29.079095Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:20:29.079134Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:20:29.079352Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:20:29.079392Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:20:29.079534Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2024-11-21T23:20:29.079566Z node 5 :TX_COLUMNSHARD WARN: tablet_id=72075186224038695;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; >> KqpJoin::RightSemiJoin_FullScan [GOOD] >> KqpJoin::RightSemiJoin_ComplexKey >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS94+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS94-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |87.6%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> KqpJoinOrder::FiveWayJoinWithConstantFold-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin-ColumnStore >> KqpJoin::JoinLeftPureInnerConverted [GOOD] >> 
KqpJoin::JoinMismatchDictKeyTypes >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS87+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS87-StreamLookupJoin+ColumnStore >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |87.6%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] >> KqpIndexLookupJoin::LeftSemi [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup >> TColumnShardTestSchema::HotTiersWithStat [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple+StreamLookupJoin-ColumnStore |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |87.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> KqpJoinOrder::TPCDS34-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231780.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=132231780.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231780.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231780.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=132231780.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=132231780.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230580.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=112231780.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112231780.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230580.000000s;Name=tier0;Codec=zstd};}{{Column=timestamp;EvictAfter=112230580.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=112230580.000000s;Name=;Codec=}; 2024-11-21T23:19:40.585173Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:19:40.752052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:19:40.774179Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:19:40.774271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:19:40.774619Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:19:40.783213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:40.783461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:40.783740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:40.783875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:40.783978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:19:40.784113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:19:40.784347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:19:40.784467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:19:40.784599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:19:40.784713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:19:40.784813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:19:40.784906Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:19:40.807418Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:19:40.807509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:19:40.823738Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:19:40.824081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:19:40.824132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:19:40.824291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:40.824457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:40.824544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:40.824580Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:19:40.824641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:19:40.824686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:40.824736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:40.824781Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:19:40.824929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:40.824982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:40.825027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:40.825058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:19:40.825144Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:19:40.825193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:40.825239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:40.825269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:19:40.825346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:40.825381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:40.825413Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:19:40.825461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:40.825503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:40.825531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:19:40.825715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=63; 2024-11-21T23:19:40.825789Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2024-11-21T23:19:40.825874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2024-11-21T23:19:40.825954Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2024-11-21T23:19:40.826143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:40.826208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:40.826243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... 
nished for tablet 9437184 2024-11-21T23:21:00.927878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:941:2941] send ScanData to [1:940:2940] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:21:00.928337Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:941:2941] and sent to [1:940:2940] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.007},{"events":["l_bootstrap"],"t":0.027},{"events":["f_processing","f_task_result"],"t":0.029},{"events":["f_ack","l_task_result"],"t":0.558},{"events":["l_ProduceResults","f_Finish"],"t":0.562},{"events":["l_ack","l_processing","l_Finish"],"t":0.563}],"full":{"a":1732231260364907,"name":"_full_task","f":1732231260364907,"d_finished":0,"c":0,"l":1732231260927923,"d":563016},"events":[{"name":"bootstrap","f":1732231260365113,"d_finished":27581,"c":1,"l":1732231260392694,"d":27581},{"a":1732231260927566,"name":"ack","f":1732231260923535,"d_finished":2886,"c":3,"l":1732231260927446,"d":3243},{"a":1732231260927556,"name":"processing","f":1732231260394040,"d_finished":385164,"c":24,"l":1732231260927450,"d":385531},{"name":"ProduceResults","f":1732231260372788,"d_finished":11499,"c":29,"l":1732231260927779,"d":11499},{"a":1732231260927781,"name":"Finish","f":1732231260927781,"d_finished":0,"c":0,"l":1732231260927923,"d":142},{"name":"task_result","f":1732231260394065,"d_finished":381824,"c":21,"l":1732231260923318,"d":381824}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:941:2941]->[1:940:2940] 2024-11-21T23:21:00.928419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:21:00.364411Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T23:21:00.928457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:21:00.928635Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.148110s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.129019s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.021735s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.036722s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};{name=SNAPSHOT;duration=0.031708s;size=0;details={};};]};; 
2024-11-21T23:21:00.928711Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:21:00.930630Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T23:21:00.930862Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T23:21:00.931006Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T23:21:00.931212Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T23:21:00.931309Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T23:21:00.931939Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:948:2948];trace_detailed=; 2024-11-21T23:21:00.932396Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T23:21:00.932679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:21:00.932914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:00.933073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:00.933342Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:21:00.933465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:00.933601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:00.933649Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:948:2948] finished for tablet 9437184 2024-11-21T23:21:00.933751Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:948:2948] send ScanData to [1:947:2947] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:21:00.934226Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:948:2948] and sent to [1:947:2947] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1732231260931858,"name":"_full_task","f":1732231260931858,"d_finished":0,"c":0,"l":1732231260933806,"d":1948},"events":[{"name":"bootstrap","f":1732231260932073,"d_finished":1033,"c":1,"l":1732231260933106,"d":1033},{"a":1732231260933315,"name":"ack","f":1732231260933315,"d_finished":0,"c":0,"l":1732231260933806,"d":491},{"a":1732231260933298,"name":"processing","f":1732231260933298,"d_finished":0,"c":0,"l":1732231260933806,"d":508},{"name":"ProduceResults","f":1732231260932814,"d_finished":544,"c":2,"l":1732231260933628,"d":544},{"a":1732231260933631,"name":"Finish","f":1732231260933631,"d_finished":0,"c":0,"l":1732231260933806,"d":175}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:948:2948]->[1:947:2947] 2024-11-21T23:21:00.934331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:21:00.931380Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T23:21:00.934385Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:21:00.934965Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T23:21:00.935093Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> KqpJoin::IdxLookupLeftPredicate >> KqpJoin::JoinLeftPureExclusion [GOOD] >> KqpJoin::JoinLeftPureCross ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231781.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231781.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231781.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231781.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231781.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231781.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230581.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112231781.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112231781.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230581.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=112230581.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112230581.000000s;Name=;Codec=}; 2024-11-21T23:19:41.848345Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:19:42.026231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:19:42.052336Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:19:42.052485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:19:42.052784Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:19:42.061799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:42.062052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:42.062339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:42.063595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:42.063769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:19:42.063918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:19:42.064054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:19:42.064189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:19:42.064298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:19:42.064414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:19:42.064520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:19:42.064626Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:19:42.088382Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:19:42.088487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:19:42.097529Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:19:42.097847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:19:42.097906Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:19:42.098132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:42.098317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:42.098419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:42.098466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:19:42.099197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:19:42.099307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:42.099361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:42.099411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:19:42.099641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:42.099709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:42.099758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:42.099793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:19:42.099889Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:19:42.099943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:42.099997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:42.100027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:19:42.100105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:42.100144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:42.100173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:19:42.100222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:42.100264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:42.100293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:19:42.100513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=68; 2024-11-21T23:19:42.100603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2024-11-21T23:19:42.100692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2024-11-21T23:19:42.100800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2024-11-21T23:19:42.100978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:42.101038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:42.101073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTx ... 
2941] finished for tablet 9437184 2024-11-21T23:21:01.210436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:941:2941] send ScanData to [1:940:2940] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:21:01.210910Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:941:2941] and sent to [1:940:2940] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.529},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.533}],"full":{"a":1732231260676483,"name":"_full_task","f":1732231260676483,"d_finished":0,"c":0,"l":1732231261210479,"d":533996},"events":[{"name":"bootstrap","f":1732231260676758,"d_finished":6161,"c":1,"l":1732231260682919,"d":6161},{"a":1732231261210133,"name":"ack","f":1732231261205760,"d_finished":3282,"c":3,"l":1732231261210047,"d":3628},{"a":1732231261210122,"name":"processing","f":1732231260684257,"d_finished":355812,"c":24,"l":1732231261210050,"d":356169},{"name":"ProduceResults","f":1732231260679399,"d_finished":7622,"c":29,"l":1732231261210316,"d":7622},{"a":1732231261210318,"name":"Finish","f":1732231261210318,"d_finished":0,"c":0,"l":1732231261210479,"d":161},{"name":"task_result","f":1732231260684289,"d_finished":352103,"c":21,"l":1732231261205548,"d":352103}],"id":"9437184::9"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:941:2941]->[1:940:2940] 2024-11-21T23:21:01.211006Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:21:00.675981Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T23:21:01.211052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:21:01.211233Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.170028s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.176781s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.048823s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.054890s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2024-11-21T23:21:01.211312Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:941:2941];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:21:01.213190Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 9 at tablet 9437184 2024-11-21T23:21:01.213410Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000013:max} readable: {1000000013:max} at tablet 9437184 2024-11-21T23:21:01.213547Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T23:21:01.213734Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T23:21:01.213833Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T23:21:01.214378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000013:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:948:2948];trace_detailed=; 2024-11-21T23:21:01.217621Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T23:21:01.217906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:21:01.218139Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:01.218308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:01.222188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:21:01.222344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:01.222525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:01.222586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:948:2948] finished for tablet 9437184 2024-11-21T23:21:01.222718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:948:2948] send ScanData to [1:947:2947] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:21:01.223285Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:948:2948] and sent to [1:947:2947] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0.002},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.004},{"events":["f_ack","f_processing"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":1732231261214311,"name":"_full_task","f":1732231261214311,"d_finished":0,"c":0,"l":1732231261222783,"d":8472},"events":[{"name":"bootstrap","f":1732231261217186,"d_finished":1160,"c":1,"l":1732231261218346,"d":1160},{"a":1732231261222148,"name":"ack","f":1732231261222148,"d_finished":0,"c":0,"l":1732231261222783,"d":635},{"a":1732231261222113,"name":"processing","f":1732231261222113,"d_finished":0,"c":0,"l":1732231261222783,"d":670},{"name":"ProduceResults","f":1732231261218030,"d_finished":633,"c":2,"l":1732231261222567,"d":633},{"a":1732231261222571,"name":"Finish","f":1732231261222571,"d_finished":0,"c":0,"l":1732231261222783,"d":212}],"id":"9437184::10"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData 
[1:948:2948]->[1:947:2947] 2024-11-21T23:21:01.223445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:21:01.213901Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T23:21:01.223513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:21:01.223574Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T23:21:01.223723Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:948:2948];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 160000/9495672 80000/4750028 0/0 >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: 2024-11-21T23:12:28.069590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:398:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2024-11-21T23:12:28.069874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:12:28.069958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002710/r3tmp/tmpZdFuk2/pdisk_1.dat 2024-11-21T23:12:28.531057Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10601, node 1 2024-11-21T23:12:28.896746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:12:28.898350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:12:28.898442Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:12:28.899754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:12:28.972580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2024-11-21T23:12:29.080385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:29.080545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:29.100711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7400 2024-11-21T23:12:29.857457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:12:33.757801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:33.757934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:33.823543Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:12:33.834703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:34.141345Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:12:34.250589Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:12:34.250732Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:12:34.304323Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:12:34.311054Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:12:34.311357Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:12:34.311434Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:12:34.311503Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:12:34.311613Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 
2024-11-21T23:12:34.311706Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:12:34.311786Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:12:34.312397Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:12:34.616183Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:12:34.616328Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1755:2549], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:12:34.650044Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1759:2552] 2024-11-21T23:12:34.681344Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:1803:2576] 2024-11-21T23:12:34.681833Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:1803:2576], schemeshard id = 72075186224037889 2024-11-21T23:12:34.690178Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T23:12:34.807704Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:12:34.807780Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:12:34.807885Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2024-11-21T23:12:34.839155Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:12:34.839281Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:12:34.863369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:12:34.891429Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:12:34.891603Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:12:34.911467Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:12:34.941519Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:12:34.970145Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:12:35.638834Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:12:35.835164Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:12:37.346328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2140:3021], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:37.346526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:37.465479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037889 2024-11-21T23:12:38.231954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2433:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:38.232148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:38.233647Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2438:3075]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:12:38.239245Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2024-11-21T23:12:38.244161Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2440:3077] 2024-11-21T23:12:38.244282Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2440:3077] 2024-11-21T23:12:38.245185Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:2441:2945] 2024-11-21T23:12:38.245554Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2440:3077], server id = [2:2441:2945], tablet id = 72075186224037897, status = OK 2024-11-21T23:12:38.245743Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [2:2441:2945], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T23:12:38.254233Z node 2 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2024-11-21T23:12:38.258687Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:12:38.258861Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2438:3075], StatRequests.size() = 1 2024-11-21T23:12:38.388292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2445:3081], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:38.388430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:38.390008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2450:3086], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:12:38.434858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2024-11-21T23:12:38.682906Z node 2 :STATISTICS DEBUG: [72075186224037897] EvFastPropagateCheck 2024-11-21T23:12:38.683006Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2024-11-21T23:12:38.774143Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2440:3077], schemeshard count = 1 2024-11-21T23:12:39.105286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2452:3088], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2024-11-21T23:12:39.298370Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2590:3177]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:12:39.298643Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2024-11-21T23:12:39.298698Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2590:3177], StatRequests.size() = 1 2024-11-21T23:12:39.690712Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jd8fzfq03d7rfzs08eb3a7cz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQyNzI2Zi02YjBmZDQ4OS1jM2EwNjM3ZS02OWE4NzY1Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for TEvPeriodicTableStats 2024-11-21T23:12:42.694953Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 1, schemeshard count ... 1T23:19:22.768024Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:22.983464Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T23:19:22.983544Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T23:19:22.983595Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T23:19:22.983629Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T23:19:24.762790Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:19:24.763171Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:19:24.763438Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:19:26.834932Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:26.835011Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:30.355027Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:19:30.419227Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:30.419311Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:34.119212Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:19:34.119408Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:19:34.119754Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:19:34.167189Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:34.167271Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:37.568172Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:37.568254Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:39.235162Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:19:41.140397Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:41.140480Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T23:19:43.015011Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:19:43.015385Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:19:43.015673Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:19:44.863181Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:44.863262Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:48.207038Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:19:48.329810Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:48.329893Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:51.808917Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:19:51.809357Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:19:51.809651Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:19:51.901696Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:51.901770Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:55.405613Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:55.405676Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:56.844081Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:19:58.296279Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:19:58.296355Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:19:59.870132Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:19:59.870571Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:19:59.870876Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:20:01.601121Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:01.601196Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:04.527778Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:20:04.587074Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:04.587153Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:07.212344Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:20:07.212744Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:20:07.213034Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:20:07.271159Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:07.271234Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:10.335156Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:10.335236Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 
2024-11-21T23:20:11.639903Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:20:13.106617Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:13.106691Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:13.351245Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2024-11-21T23:20:13.351325Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T23:20:13.351358Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2024-11-21T23:20:13.351394Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2024-11-21T23:20:15.063019Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:20:15.063409Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:20:15.063703Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:20:17.071231Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:17.071309Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:20.727212Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:20:20.802728Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:20.802806Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:24.357286Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:20:24.357472Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:20:24.357836Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:20:24.398044Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:24.398115Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:27.996893Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:27.996969Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:29.789184Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:20:31.754934Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:31.755012Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:33.715963Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:20:33.716132Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:20:33.716418Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:20:35.663135Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:35.663216Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:39.254468Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:20:39.291258Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:39.291337Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. 
No force traversals. 2024-11-21T23:20:43.089712Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:20:43.089926Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:20:43.090247Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2024-11-21T23:20:43.151392Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:43.151470Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:47.109410Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:47.109487Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:47.150138Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037889 2024-11-21T23:20:47.150247Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 188.000000s, at schemeshard: 72075186224037889 2024-11-21T23:20:47.155579Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037889, stats size# 49 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2024-11-21T23:20:47.205034Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxSchemeShardStats::Complete 2024-11-21T23:20:48.946833Z node 2 :STATISTICS DEBUG: [72075186224037897] EvPropagateTimeout 2024-11-21T23:20:50.939684Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal 2024-11-21T23:20:50.939785Z node 2 :STATISTICS DEBUG: [72075186224037897] ScheduleNextTraversal. No force traversals. 2024-11-21T23:20:52.850548Z node 2 :STATISTICS DEBUG: [72075186224037897] PropagateStatistics(), node count = 2, schemeshard count = 1 2024-11-21T23:20:52.850964Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 ... 
waiting for TEvPropagateStatistics (done) 2024-11-21T23:20:52.876716Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:12977:7253]], StatType[ 0 ], StatRequestsCount[ 1 ] 2024-11-21T23:20:52.951001Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2024-11-21T23:20:53.113167Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2024-11-21T23:20:53.113263Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [2:12977:7253], StatRequests.size() = 1 >> TDynamicNameserverTest::TestCacheUsage >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96+StreamLookupJoin-ColumnStore |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |87.6%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 10472, MsgBus: 5569 2024-11-21T23:20:09.845845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876395452001182:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:09.845896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f16/r3tmp/tmpfiShMs/pdisk_1.dat 2024-11-21T23:20:10.211429Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10472, node 1 2024-11-21T23:20:10.267621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:10.267758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:10.269471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:20:10.286999Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:10.287023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:10.287035Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:10.287126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5569 TClient is connected to server localhost:5569 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:10.818196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:10.834874Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:10.851669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:10.990221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:11.179874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:11.260450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:13.614943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876412631871856:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:13.622850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:13.663125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:13.724376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:13.816590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:13.859270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:13.935512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:14.013545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:14.087850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876416926839653:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:14.087932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:14.088306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876416926839658:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:14.093883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:14.105480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876416926839660:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:14.849787Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876395452001182:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:14.849883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:15.114810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:15.170792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10033, MsgBus: 18933 2024-11-21T23:20:17.083631Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876428535164617:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:17.083674Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f16/r3tmp/tmpkzPk7P/pdisk_1.dat 2024-11-21T23:20:17.244299Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:17.256412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:17.256469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:17.262333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10033, node 2 2024-11-21T23:20:17.338159Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:17.338182Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:17.338190Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:17.338286Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18933 TClient is connected to server localhost:18933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2024-11-21T23:20:17.803642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2024-11-21T23:20:17.822365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:17.899061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:20:18.068951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:20:18.150825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation ... t; 2024-11-21T23:20:48.418884Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876561144590547:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:48.419028Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:48.469817Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:48.541295Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:48.670048Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:48.736794Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:48.841867Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:48.977592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:49.154901Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876565439558372:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:49.155009Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:49.155680Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876565439558377:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:49.169363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:49.202838Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876565439558379:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:51.225032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:51.312970Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8883, MsgBus: 63983 2024-11-21T23:20:54.381861Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439876586058550785:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:54.382977Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f16/r3tmp/tmprDVNJX/pdisk_1.dat 2024-11-21T23:20:54.564132Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:54.627422Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:54.627530Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:54.628926Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8883, node 6 2024-11-21T23:20:54.766993Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:54.767018Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:54.767026Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:54.767157Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63983 TClient is connected to server localhost:63983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:55.470412Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:20:55.481251Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:20:55.504889Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:55.599610Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:55.830717Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:55.935934Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:58.811698Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876603238421650:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:58.811793Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:58.838864Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:58.928946Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.051323Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.141925Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.232549Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.379616Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439876586058550785:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:59.380311Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:59.399303Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.535590Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876607533389459:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:59.535742Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:59.536385Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876607533389464:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:59.542180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:59.570795Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439876607533389466:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:21:01.697482Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:01.882723Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS16+StreamLookupJoin-ColumnStore >> KqpJoinOrder::FourWayJoinLeftFirst-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TestJoinOrderHintsComplex+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore >> TBSV::ShardsNotLeftInShardsToDelete >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] >> TColumnShardTestSchema::ExportWithLostAnswer >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin+ColumnStore >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] Test command err: 2024-11-21T23:21:09.471735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:21:09.471816Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 7992, MsgBus: 28622 2024-11-21T23:20:05.765949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876374658755318:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:05.766221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f32/r3tmp/tmpXtpE33/pdisk_1.dat 2024-11-21T23:20:06.261645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:06.261793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:06.265431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:20:06.266939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7992, node 1 2024-11-21T23:20:06.392756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:06.392784Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:06.392812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:06.392914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient 
is connected to server localhost:28622 TClient is connected to server localhost:28622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:07.044236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:07.068511Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:07.081250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:07.256891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:07.448168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:07.531010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:09.476571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876391838626225:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:09.476686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:09.700944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:09.794454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:09.840905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:09.878796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:09.976856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:10.015514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:10.081900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876396133594022:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:10.081999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:10.082296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876396133594027:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:10.085773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:10.101934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876396133594029:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:10.770538Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876374658755318:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:10.782370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:11.341109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:11.406090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12618, MsgBus: 2837 2024-11-21T23:20:13.403358Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876412739920232:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:13.403443Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f32/r3tmp/tmpAyTCQ7/pdisk_1.dat 2024-11-21T23:20:13.620226Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:13.633369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:13.633459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:13.636459Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12618, node 2 2024-11-21T23:20:13.809841Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:13.809865Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:13.809872Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:13.809973Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2837 TClient is connected to server localhost:2837 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:20:14.364344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:14.382816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:14.484831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:14.681172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:14.757026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation pa ... default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:44.980833Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:45.044703Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:45.102594Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:45.156796Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:45.255503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:45.334621Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:45.402284Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:45.517154Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876550616939754:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:45.517260Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:45.517517Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876550616939759:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:45.522426Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:45.559010Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876550616939761:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:47.647878Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:47.775835Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1811, MsgBus: 24461 2024-11-21T23:20:51.315680Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439876573306932800:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:51.315757Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f32/r3tmp/tmpANgTgz/pdisk_1.dat 2024-11-21T23:20:51.873803Z node 6 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:51.957589Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:51.957921Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:51.960001Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1811, node 6 2024-11-21T23:20:52.139168Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:52.139203Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:52.139213Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:52.139379Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24461 TClient is connected to server localhost:24461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:53.332104Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:20:53.354787Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:53.371151Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:53.580018Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:54.139368Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:54.279035Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:56.319831Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439876573306932800:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:56.319919Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:01.826583Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876616256607507:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:01.826711Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:01.964302Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.049410Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.142938Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.272611Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.403412Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.536654Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.693512Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876620551575317:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:02.693668Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:02.698586Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876620551575322:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:02.713286Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:02.750644Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439876620551575324:2446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:05.680345Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:05.789471Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:06.842760Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:21:06.842812Z node 6 :IMPORT WARN: Table profiles were not loaded >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |87.6%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut |87.7%| [LD] {RESULT} $(B)/ydb/library/yql/utils/actors/ut/ydb-library-yql-utils-actors-ut >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup >> KqpJoinOrder::TPCDS16-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS23-StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] Test command err: Trying to start YDB, gRPC: 8709, MsgBus: 17221 2024-11-21T23:20:16.410894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876425571967455:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:16.411106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000ec1/r3tmp/tmp7mDkLN/pdisk_1.dat 2024-11-21T23:20:17.101820Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:17.113966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:17.114227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:17.116293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8709, node 1 2024-11-21T23:20:17.223039Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:17.223060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:17.223070Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:17.223159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17221 TClient is connected to server localhost:17221 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:17.836202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:17.864785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:18.012671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:18.166179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:18.252707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:20.388704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876442751838353:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.454243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.725355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.761018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.810888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.841378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.874095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.911098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:20.994910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876442751838849:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.995006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.995227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876442751838854:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:20.999511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:21.010557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876442751838856:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:21.534306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876425571967455:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:21.534556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:22.177618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:22.260392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8708, MsgBus: 9907 2024-11-21T23:20:24.525624Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876458636277932:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:24.525675Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000ec1/r3tmp/tmpbzHseb/pdisk_1.dat 2024-11-21T23:20:24.716247Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:24.717991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:24.718060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:24.719666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8708, node 2 2024-11-21T23:20:24.882942Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:24.882965Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:24.882980Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:24.883072Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9907 TClient is connected to server localhost:9907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:20:25.512182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:25.518474Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:20:25.528296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:25.584672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:25.764785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:25.855749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part ... 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:50.628042Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876569419476424:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:50.628174Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:50.656947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:50.752863Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:50.829189Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:50.957680Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:51.023564Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:51.081788Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:51.194863Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876573714444225:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:51.194969Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:51.195531Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876573714444230:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:51.200561Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:51.215306Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876573714444232:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:53.268546Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.379962Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.464440Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2390, MsgBus: 20435 2024-11-21T23:20:57.619914Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876598868411731:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:57.620175Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000ec1/r3tmp/tmpy43zJ2/pdisk_1.dat 2024-11-21T23:20:58.131200Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:58.342121Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:58.366690Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:58.367958Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2390, node 5 2024-11-21T23:20:58.511148Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:58.511179Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:58.511196Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:58.511344Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20435 TClient is connected to server localhost:20435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:59.865032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:20:59.894658Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:00.110123Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:00.707597Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:00.962043Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:02.621813Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876598868411731:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:02.622000Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:06.326582Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876637523118976:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:06.326693Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:06.381323Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:06.468620Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:06.542890Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:06.630066Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:06.701635Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:06.877765Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:06.994905Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876637523119492:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:06.995046Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:06.995385Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876637523119497:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:07.000735Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:07.049393Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876637523119499:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:08.872836Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 7060, MsgBus: 64554 2024-11-21T23:19:39.614467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876265207868407:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:39.618216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002899/r3tmp/tmp3efpzD/pdisk_1.dat 2024-11-21T23:19:40.052009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:40.053039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:40.062591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:40.095168Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7060, node 1 2024-11-21T23:19:40.221237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:40.221262Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:40.221269Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:40.221369Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64554 TClient is connected to server localhost:64554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:41.171989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:19:41.207141Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:41.222445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.391237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.653427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:41.757396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:43.931419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876282387739152:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.946196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:43.997784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.058955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.105963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.138428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.179417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.276747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:44.348029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876286682706950:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.348163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.348418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876286682706956:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:44.352959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:44.365058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876286682706958:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:44.658943Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876265207868407:2186];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:44.667454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:45.618734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:48.309091Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7439876299567609692:2489] TxId: 281474976710672. Ctx: { TraceId: 01jd8gcjfm925f4armvkdn8h09, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTIyMmQ3MzktMzk4ZGRmN2EtNDg2OWI2NTMtZjRhYTI1YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 100442498 > 50331648 2024-11-21T23:19:48.341348Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTIyMmQ3MzktMzk4ZGRmN2EtNDg2OWI2NTMtZjRhYTI1YjI=, ActorId: [1:7439876290977674915:2489], ActorState: ExecuteState, TraceId: 01jd8gcjfm925f4armvkdn8h09, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (100442498 > 50331648), code: 200509 Trying to start YDB, gRPC: 17781, MsgBus: 1319 2024-11-21T23:19:49.423522Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876307763344107:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:49.423576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002899/r3tmp/tmpkRsPcd/pdisk_1.dat 2024-11-21T23:19:49.791955Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:49.805732Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:49.805827Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:49.810467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17781, node 2 2024-11-21T23:19:49.929616Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:49.929642Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:49.929651Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:49.929767Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1319 TClient is connected to server localhost:1319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:50.568345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:50.579288Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:19:50.591970Z node 2 :FLAT_TX ... at schemeshard: 72057594046644480 2024-11-21T23:19:53.541756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876324943215507:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:53.541850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:53.542115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876324943215512:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:53.545876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:53.557554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876324943215514:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:54.423878Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876307763344107:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:54.423940Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:04.774570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:20:04.774615Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:51.672747Z node 2 :KQP_EXECUTER WARN: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jd8gct561ce05d66x8rhtfz2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWViZjFlODQtZjIyYTliZDMtNjk5ZDA1M2MtMWUzMGJmMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, memory limit exceeded. 2024-11-21T23:20:51.673250Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWViZjFlODQtZjIyYTliZDMtNjk5ZDA1M2MtMWUzMGJmMWQ=, ActorId: [2:7439876329238183125:2460], ActorState: ExecuteState, TraceId: 01jd8gct561ce05d66x8rhtfz2, Create QueryResponse for error on request, msg: 2024-11-21T23:20:51.673473Z node 2 :KQP_SLOW_LOG WARN: TraceId: "01jd8gct561ce05d66x8rhtfz2", SessionId: ydb://session/3?node_id=2&id=OWViZjFlODQtZjIyYTliZDMtNjk5ZDA1M2MtMWUzMGJmMWQ=, Slow query, duration: 56.882251s, status: PRECONDITION_FAILED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT ToDict(\n ListMap(\n ListFromRange(0ul, 5000000ul),\n ($x) -> { RETURN AsTuple($x, $x + 1); }\n )\n );\n ", parameters: 0b
: Warning: Type annotation, code: 1030
:2:13: Warning: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:2:20: Warning: At function: ToDict
:5:38: Warning: At function: OrderedMap
:5:53: Warning: At function: +
:5:53: Warning: Integral type implicit bitcast: Uint64 and Int32, code: 1107
: Error: Memory limit exceeded, code: 2029 Trying to start YDB, gRPC: 12524, MsgBus: 5014 2024-11-21T23:20:52.998868Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439876578280508564:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002899/r3tmp/tmpUjN9hq/pdisk_1.dat 2024-11-21T23:20:53.186436Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:20:53.447693Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:53.476560Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:53.476684Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:53.484250Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12524, node 3 2024-11-21T23:20:53.748735Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:53.748768Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:53.748779Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:53.748923Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5014 TClient is connected to server localhost:5014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:54.649239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:54.661306Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:20:54.682174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:54.776566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
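For readability, the statement captured in the KQP_SLOW_LOG entry above is reproduced here with its escaped newlines expanded; this is only a re-rendering of the logged query, not part of the test output. The Int32 literal `1` in `$x + 1` is what the analyzer flags at :5:53 as an implicit Uint64/Int32 bitcast (warning code 1107), and evaluating the 5,000,000-element dictionary in TKqpLiteralExecuter is, per the log, what trips the memory limit (code 2029).

    SELECT ToDict(
        ListMap(
            ListFromRange(0ul, 5000000ul),          -- 5,000,000 Uint64 keys
            ($x) -> { RETURN AsTuple($x, $x + 1); } -- `1` is Int32, hence the bitcast warning at :5:53
        )
    );

Writing the literal as `1ul` would keep the addition entirely in Uint64 and silence warning 1107; the memory-limit failure itself presumably comes from the size of the materialized dict rather than from the cast.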
2024-11-21T23:20:54.976251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:55.062410Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:57.944488Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7439876578280508564:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:57.947831Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:57.959606Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876599755346599:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:57.959705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:58.014288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:58.095551Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:58.146000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:58.196880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:58.252847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:58.301398Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:58.353987Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876604050314400:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:58.354071Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:58.354484Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7439876604050314405:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:58.359369Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:58.375525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7439876604050314407:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:59.730505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:06.239784Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MmE4MjZiYmItYTQzMjVkODYtNDVlNmRjN2MtMzcxODFhMTU=, ActorId: [3:7439876608345282019:2462], ActorState: ExecuteState, TraceId: 01jd8gezer48m7wtdbfzzwwkd1, Create QueryResponse for error on request, msg: >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 28616, MsgBus: 18707 2024-11-21T23:20:11.037618Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876403245890451:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:11.037660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f13/r3tmp/tmpOWxUct/pdisk_1.dat 2024-11-21T23:20:11.423624Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:11.446780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:11.446883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:11.448576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28616, node 1 2024-11-21T23:20:11.530904Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:11.530932Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:11.530943Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:11.531054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18707 TClient is connected to server localhost:18707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:20:12.189606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:12.208075Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:12.224140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:12.370830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:12.547490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:12.632206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:14.601942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876416130793834:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:14.643787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:14.843209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:14.914799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:14.964159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:14.993948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:15.021045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:15.100241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:15.148753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876420425761631:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:15.148845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:15.149124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876420425761636:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:15.151971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:15.162085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876420425761638:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:16.037868Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876403245890451:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:16.037942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:16.351928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:16.381938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:16.412430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:16.489121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:16.524173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:16.592172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10449, MsgBus: 1610 2024-11-21T23:20:18.652775Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876433863803981:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:18.652821Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f13/r3tmp/tmpl46qA2/pdisk_1.dat 2024-11-21T23:20:18.749441Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:18.767280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:18.767365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:18.771090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10449, node 2 2024-11-21T23:20:18.824878Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:18.824904Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:18.824912Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:18.825000Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1610 TClient is connected to server localhost:1610 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:19.235634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo un ... ICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876575373043160:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:51.896084Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:51.896138Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876575373043165:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:51.899921Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:51.927360Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876575373043167:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:53.600656Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.675278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.732568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.794075Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.855951Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.912239Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2972, MsgBus: 15140 2024-11-21T23:20:56.643252Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7439876594459276413:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:56.643305Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f13/r3tmp/tmpd1pBVB/pdisk_1.dat 2024-11-21T23:20:56.939186Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2972, node 6 2024-11-21T23:20:56.964978Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:56.965081Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:56.966912Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:20:57.074983Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:57.075020Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:57.075030Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:57.075150Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15140 TClient is connected to server localhost:15140 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:57.843535Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:57.856189Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:57.877571Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:20:57.965499Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:20:58.165646Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:58.260424Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:01.643898Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7439876594459276413:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:01.643973Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:03.430807Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876624524049198:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:03.431056Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:03.455143Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:03.543977Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:03.654324Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:03.757836Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:03.845503Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:03.967239Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:04.345754Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876628819017023:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:04.345884Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:04.346339Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7439876628819017028:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:04.353723Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:04.399853Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7439876628819017030:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:09.000611Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:09.055449Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:09.106578Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:21:09.161883Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:21:09.224813Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:21:09.305142Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:21:11.914572Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:21:11.914605Z node 6 :IMPORT WARN: Table profiles were not loaded |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin+ColumnStore >> KqpCost::ScanScriptingRangeFullScan+SourceRead |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpJoin::IdxLookupLeftPredicate [GOOD] >> KqpJoin::IdxLookupPartialLeftPredicate >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> TColumnShardTestSchema::RebootColdTiers >> KqpJoin::JoinLeftPureCross [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:21:14.830656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:21:14.830771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:21:14.830811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:21:14.831105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:21:14.842232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:21:14.842320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:21:14.842454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:21:14.851183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:21:15.079470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:21:15.079571Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:15.118977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:21:15.123320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:21:15.143708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:21:15.178563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:21:15.194718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:21:15.239125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:15.254895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:21:15.340567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:21:15.498718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:21:15.498823Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:21:15.499166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:21:15.499233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:21:15.507142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:21:15.507391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:21:15.535657Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:21:15.818559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } 
TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:21:15.842514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:15.874527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:21:15.914286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:21:15.914437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:15.923761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:15.982523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:21:15.983002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:15.983079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:21:15.983137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:21:15.983173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:21:15.986465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:15.986537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:21:15.986575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:21:15.999605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:15.999709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:15.999758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:21:15.999811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:21:16.025296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:21:16.043307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:21:16.054528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at 
step: 5000001 2024-11-21T23:21:16.055926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:16.056127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:21:16.056207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:21:16.066755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:21:16.066850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:21:16.077433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:21:16.077687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:21:16.094555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:21:16.094643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:21:16.094882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:21:16.094932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:21:16.095348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:16.095405Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:21:16.095503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:21:16.095538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:21:16.095581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:21:16.095652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:21:16.095727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:21:16.095763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:21:16.095834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:21:16.095870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:21:16.095901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:21:16.098241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2024-11-21T23:21:16.098376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:21:16.098458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:21:16.098534Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:21:16.098581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:21:16.098692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... erId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2024-11-21T23:21:16.645883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409546 2024-11-21T23:21:16.646030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:16.646447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2024-11-21T23:21:16.650803Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2024-11-21T23:21:16.654820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T23:21:16.655088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2024-11-21T23:21:16.684031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2024-11-21T23:21:16.684197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2024-11-21T23:21:16.693915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:16.694088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:21:16.694158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2024-11-21T23:21:16.694289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
2024-11-21T23:21:16.694460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2024-11-21T23:21:16.694505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:21:16.694584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:21:16.694696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:21:16.694739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2024-11-21T23:21:16.694793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2024-11-21T23:21:16.694831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2024-11-21T23:21:16.694865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2024-11-21T23:21:16.694980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2024-11-21T23:21:16.695033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2024-11-21T23:21:16.695066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2024-11-21T23:21:16.695098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2024-11-21T23:21:16.702731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T23:21:16.702809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2024-11-21T23:21:16.703079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2024-11-21T23:21:16.703134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2024-11-21T23:21:16.703970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:21:16.704025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:21:16.704184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:21:16.704215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:21:16.704359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2024-11-21T23:21:16.704489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:21:16.704519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2024-11-21T23:21:16.704550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2024-11-21T23:21:16.705107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:21:16.705204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:21:16.705236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:21:16.705276Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2024-11-21T23:21:16.705317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2024-11-21T23:21:16.705717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:21:16.705760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2024-11-21T23:21:16.705817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:21:16.706276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:21:16.706361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2024-11-21T23:21:16.706387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2024-11-21T23:21:16.706459Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2024-11-21T23:21:16.706483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:21:16.706540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2024-11-21T23:21:16.706791Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2024-11-21T23:21:16.707096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:16.707489Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2024-11-21T23:21:16.707652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2024-11-21T23:21:16.716083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
2024-11-21T23:21:16.719096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:21:16.719268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2024-11-21T23:21:16.721357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2024-11-21T23:21:16.721457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2024-11-21T23:21:16.721756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2024-11-21T23:21:16.721824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2024-11-21T23:21:16.722302Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2024-11-21T23:21:16.722426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2024-11-21T23:21:16.722497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:389:2370] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2024-11-21T23:21:16.723105Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2024-11-21T23:21:16.744731Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> KqpLimits::CancelAfterRoTxWithFollowerLegacyDependedRead [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TColumnShardTestSchema::RebootInternalTTL |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureCross [GOOD] Test command err: Trying to start YDB, gRPC: 23939, MsgBus: 15815 2024-11-21T23:20:26.091971Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876468598656040:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:26.092019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e9d/r3tmp/tmp0tymrc/pdisk_1.dat 2024-11-21T23:20:26.532396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:26.532490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:26.540200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:20:26.602994Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23939, node 1 2024-11-21T23:20:26.756296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:26.756321Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:26.756358Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:26.756474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15815 TClient is connected to server localhost:15815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:27.698201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:27.719615Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:27.725356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:27.931832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:28.126633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:28.236800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:20:30.508121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876485778526743:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:30.508218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:30.765236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.822756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.886163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.952236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:30.991923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:31.039956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:31.087840Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876468598656040:2254];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:31.087931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:31.114791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876490073494533:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:31.114869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:31.115140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876490073494539:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:31.119994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:31.134980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876490073494541:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:32.620257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:32.694859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:32.740969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11838, MsgBus: 20047 2024-11-21T23:20:34.637959Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876503268723851:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:34.644261Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e9d/r3tmp/tmpDMXCy9/pdisk_1.dat 2024-11-21T23:20:34.824084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:34.824163Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:34.825197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11838, node 2 2024-11-21T23:20:34.907281Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:35.001663Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:35.001683Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:35.001693Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:35.001804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20047 TClient is connected to server localhost:20047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:35.760542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:20:35.770012Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:20:35.781076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:35.898307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCrea ... CHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:56.151438Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:56.404115Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:59.019789Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876609203884852:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:59.019887Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:59.076588Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.137441Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.219079Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.270923Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.331349Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.401631Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:59.501165Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876609203885352:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:59.501246Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:59.501427Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876609203885357:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:59.518705Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:59.547451Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876609203885359:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:20:59.641194Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7439876587729046669:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:59.641369Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25213, MsgBus: 24619 2024-11-21T23:21:04.111334Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876625129925495:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e9d/r3tmp/tmpAoIT0h/pdisk_1.dat 2024-11-21T23:21:04.325570Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:21:04.478025Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:04.501269Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:04.501379Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:04.507602Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25213, node 5 2024-11-21T23:21:04.763019Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:04.763053Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:04.763064Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:04.763199Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24619 TClient is connected to server localhost:24619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:06.284977Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:06.305380Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:21:06.578961Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:07.528443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:21:07.764193Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:21:09.002941Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876625129925495:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:09.003022Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:14.339994Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876672374567338:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:14.347814Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:14.379271Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:14.456956Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:14.602255Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:14.663773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:14.709098Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:14.824756Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:14.971442Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876672374567844:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:14.971593Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:14.972012Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876672374567850:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:14.977431Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:15.004128Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876672374567852:2444], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> TColumnShardTestSchema::RebootHotTiersTtl |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:112:2058] recipient: [1:106:2138] Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:124:2058] recipient: [1:106:2138] 2024-11-21T23:21:19.747565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2024-11-21T23:21:19.747662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:21:19.747717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2024-11-21T23:21:19.747779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2024-11-21T23:21:19.747832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2024-11-21T23:21:19.747859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2024-11-21T23:21:19.747910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2024-11-21T23:21:19.748251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2024-11-21T23:21:19.859018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2024-11-21T23:21:19.859083Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:19.887435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2024-11-21T23:21:19.891392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2024-11-21T23:21:19.891611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2024-11-21T23:21:19.903443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2024-11-21T23:21:19.910720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2024-11-21T23:21:19.911401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:19.911824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2024-11-21T23:21:19.916764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:21:19.918029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2024-11-21T23:21:19.918085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:21:19.918303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2024-11-21T23:21:19.918352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:21:19.922506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2024-11-21T23:21:19.922679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2024-11-21T23:21:19.934261Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:121:2147] sender: [1:236:2058] recipient: [1:15:2062] 2024-11-21T23:21:20.100852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2024-11-21T23:21:20.101097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:20.101320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2024-11-21T23:21:20.101630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2024-11-21T23:21:20.101693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:20.110362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:20.110560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2024-11-21T23:21:20.110843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:20.110910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet72057594046678944 2024-11-21T23:21:20.110961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2024-11-21T23:21:20.111019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2024-11-21T23:21:20.115379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:20.115462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId#1:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:21:20.115497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2024-11-21T23:21:20.117967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:20.118045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:20.118106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:21:20.118156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2024-11-21T23:21:20.122240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:21:20.124452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2024-11-21T23:21:20.124730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2024-11-21T23:21:20.125679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:20.125830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:21:20.125883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:21:20.126116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2024-11-21T23:21:20.126158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet 72057594046678944 2024-11-21T23:21:20.126353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:21:20.126496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2024-11-21T23:21:20.128786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:21:20.128860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:21:20.129062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:21:20.129110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 2024-11-21T23:21:20.129495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2024-11-21T23:21:20.129543Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2024-11-21T23:21:20.129639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2024-11-21T23:21:20.129671Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:21:20.129713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2024-11-21T23:21:20.129764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2024-11-21T23:21:20.129837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2024-11-21T23:21:20.129882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2024-11-21T23:21:20.129966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:21:20.130004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2024-11-21T23:21:20.130056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2024-11-21T23:21:20.131818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:21:20.131920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2024-11-21T23:21:20.131973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2024-11-21T23:21:20.132031Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2024-11-21T23:21:20.132069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:21:20.132178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & r ... 
HARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 ProgressState, at schemeshard: 72057594046678944 2024-11-21T23:21:21.660612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2024-11-21T23:21:21.660800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2024-11-21T23:21:21.662524Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 Forgetting tablet 72075186233409569 2024-11-21T23:21:21.663624Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 2024-11-21T23:21:21.663829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2024-11-21T23:21:21.664078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 Forgetting tablet 72075186233409568 2024-11-21T23:21:21.664557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T23:21:21.664719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2024-11-21T23:21:21.664897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 2024-11-21T23:21:21.665272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T23:21:21.669406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2024-11-21T23:21:21.669556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2024-11-21T23:21:21.670522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2024-11-21T23:21:21.670642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969450 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2024-11-21T23:21:21.670703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2024-11-21T23:21:21.670823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2024-11-21T23:21:21.670960Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2024-11-21T23:21:21.671011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T23:21:21.671084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:21:21.671143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2024-11-21T23:21:21.671179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2024-11-21T23:21:21.671230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2024-11-21T23:21:21.671265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2024-11-21T23:21:21.671296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2024-11-21T23:21:21.671426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2024-11-21T23:21:21.671465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2024-11-21T23:21:21.671499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2024-11-21T23:21:21.671531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2024-11-21T23:21:21.673068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T23:21:21.673123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2024-11-21T23:21:21.673216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2024-11-21T23:21:21.673255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2024-11-21T23:21:21.674921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:21:21.674962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2024-11-21T23:21:21.675120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2024-11-21T23:21:21.675157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2024-11-21T23:21:21.675333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2024-11-21T23:21:21.675461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2024-11-21T23:21:21.675491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 129, path id: 1 2024-11-21T23:21:21.675538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2206], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2024-11-21T23:21:21.676081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:21:21.676177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:21:21.676225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2024-11-21T23:21:21.676263Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2024-11-21T23:21:21.676297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2024-11-21T23:21:21.676724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2024-11-21T23:21:21.676777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2024-11-21T23:21:21.676859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2024-11-21T23:21:21.677141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:21:21.677223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2024-11-21T23:21:21.677276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2024-11-21T23:21:21.677326Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2024-11-21T23:21:21.677356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2024-11-21T23:21:21.677439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2024-11-21T23:21:21.677828Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2024-11-21T23:21:21.677999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2024-11-21T23:21:21.678216Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2024-11-21T23:21:21.678364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2024-11-21T23:21:21.681848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 
2024-11-21T23:21:21.681998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2024-11-21T23:21:21.683676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2024-11-21T23:21:21.683822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2024-11-21T23:21:21.683917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2024-11-21T23:21:21.684537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2024-11-21T23:21:21.684582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2024-11-21T23:21:21.685428Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2024-11-21T23:21:21.685526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2024-11-21T23:21:21.685563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1670:3541] TestWaitNotification: OK eventTxId 129 >> TColumnShardTestSchema::RebootExportWithLostAnswer |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] >> OlapEstimationRowsCorrectness::TPCH3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2629, MsgBus: 9757 2024-11-21T23:20:22.177878Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876450767673511:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:22.177919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000eab/r3tmp/tmp8xo9Ec/pdisk_1.dat 2024-11-21T23:20:22.606537Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:22.609566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:22.609640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:22.617091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2629, node 1 2024-11-21T23:20:22.730711Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:22.730734Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:22.730744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:22.730833Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9757 TClient is connected to server localhost:9757 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:23.390936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:20:23.422974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2024-11-21T23:20:23.641523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:20:23.851242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:23.942996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.016103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876467947544404:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:26.016197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:26.295799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.331123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.360671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.413173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.452843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.511276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:26.658639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876467947544907:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:26.658756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:26.662534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876467947544912:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:26.666965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:26.692404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876467947544914:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:27.187077Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876450767673511:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:27.187141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:28.110922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.187786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.234616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.276667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.302573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:28.351533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27735, MsgBus: 3592 2024-11-21T23:20:32.132886Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876492608969979:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:32.278615Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000eab/r3tmp/tmpLbOiri/pdisk_1.dat 2024-11-21T23:20:32.453806Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:32.475757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:32.475847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:32.478420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27735, node 2 2024-11-21T23:20:32.637991Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:32.638013Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:32.638025Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:32.638135Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3592 TClient is connected to server localhost:3592 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:33.412950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:33.419627Z node 2 :FLAT_TX_ ... t; 2024-11-21T23:21:01.738782Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876618689541987:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:01.738950Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:01.781687Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:01.869119Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:01.918053Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:01.999390Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.097428Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.246907Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:02.402074Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876622984509797:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:02.402167Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:02.402600Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876622984509802:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:02.412341Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:02.439054Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876622984509804:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:05.296148Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:05.357109Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3327, MsgBus: 17388 2024-11-21T23:21:10.363732Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876656438957427:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:10.363786Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000eab/r3tmp/tmpMzas6A/pdisk_1.dat 2024-11-21T23:21:11.163639Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:11.188141Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:11.188280Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:11.198680Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3327, node 5 2024-11-21T23:21:11.511188Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:11.511217Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:11.511230Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:11.511363Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17388 TClient is connected to server localhost:17388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:12.963614Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:21:12.978923Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:21:12.994032Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:13.147418Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:13.601460Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:13.869823Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:15.367884Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876656438957427:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:15.367948Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:18.647848Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876690798697550:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:18.647964Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:18.759968Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:18.806129Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:18.864025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:18.931860Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:19.030249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:19.086991Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:19.173780Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876695093665349:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:19.173905Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:19.174247Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876695093665354:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:19.179803Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:19.200319Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876695093665356:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:20.747040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:20.817786Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |87.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] >> KqpJoinOrder::TPCDS9+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS9-StreamLookupJoin+ColumnStore |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] >> KqpJoin::FullOuterJoin2 |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/dstool/ydb-dstool |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/dstool/ydb-dstool |87.8%| [LD] {RESULT} $(B)/ydb/apps/dstool/ydb-dstool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 65108, MsgBus: 14835 2024-11-21T23:20:31.442977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876490181900716:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:31.443329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e82/r3tmp/tmpaYZgp6/pdisk_1.dat 2024-11-21T23:20:32.157740Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:32.172093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:32.172191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:32.180563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65108, node 1 2024-11-21T23:20:32.494828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:32.494861Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:32.494869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:32.494972Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14835 TClient is connected to server localhost:14835 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:33.317563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:33.353699Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:33.369737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:33.615504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:33.813796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:33.971515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:36.174630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876511656738789:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:36.174749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:36.429230Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876490181900716:2191];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:36.429295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:36.593867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.624900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.664610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.703546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.766826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.850600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.924881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876511656739288:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:36.924980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:36.925200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876511656739293:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:36.929100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:36.946822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876511656739295:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:38.359131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.406063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.450925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.502869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.543943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:38.596377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20394, MsgBus: 2358 2024-11-21T23:20:40.689352Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876528123167330:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e82/r3tmp/tmpQdMcyK/pdisk_1.dat 2024-11-21T23:20:40.754703Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:20:40.828994Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:40.891907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:40.891988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:40.904012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20394, node 2 2024-11-21T23:20:41.053380Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:41.053411Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:41.053421Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:41.053539Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2358 TClient is connected to server localhost:2358 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:42.126202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo un ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:08.218795Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876645286014793:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:08.218987Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:08.226601Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876645286014799:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:08.235700Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:08.251691Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876645286014801:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:10.201249Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:10.284818Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:10.373697Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:21:10.468283Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:21:10.588384Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:21:10.689784Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20584, MsgBus: 20544 2024-11-21T23:21:14.402936Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876673320114541:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:14.403010Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e82/r3tmp/tmpO5NB2R/pdisk_1.dat 2024-11-21T23:21:14.568518Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:14.608153Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:14.608267Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:14.610929Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20584, node 5 2024-11-21T23:21:14.778516Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:14.778549Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:14.778563Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:14.778701Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20544 TClient is connected to server localhost:20544 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:15.932865Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:15.964111Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:21:15.993453Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:16.132251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:16.374201Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:16.534174Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:19.402848Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876673320114541:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:19.402939Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:20.639754Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876699089920041:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:20.639884Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:20.736627Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:20.801630Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:20.894775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:20.982525Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:21.055596Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:21.296945Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:21.464677Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876703384887862:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:21.464797Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:21.464991Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876703384887868:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:21.469468Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:21.522457Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876703384887870:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:21:23.734270Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:23.793813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:23.854851Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:21:23.940235Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:21:24.016976Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T23:21:24.100040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> TColumnShardTestSchema::RebootHotTiersTtlWithStat >> TColumnShardTestSchema::RebootInternalTTL [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpJoinOrder::TPCDS90-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92-StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS96+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS96-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootInternalTTL [GOOD] Test command err: 2024-11-21T23:21:20.647450Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:21:20.766491Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:21:20.771377Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:21:20.771798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:21:20.796562Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:21:20.796681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:21:20.796956Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:21:20.808080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:21:20.808327Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:21:20.808565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:21:20.808683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:21:20.808783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:21:20.808872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:21:20.809014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:21:20.809111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:21:20.809218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:21:20.809311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:21:20.809431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:21:20.809556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:21:20.833618Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:21:20.833985Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T23:21:20.834069Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:21:20.834117Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:21:20.860429Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:21:20.860766Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:21:20.860819Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:21:20.861004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:20.861187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:21:20.861257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:21:20.861299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:21:20.861421Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:21:20.861490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:21:20.861530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:21:20.861567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:21:20.861752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:20.861825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:21:20.861862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:21:20.861914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:21:20.862005Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:21:20.862068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:21:20.862125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:21:20.862157Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:21:20.862233Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:21:20.862269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:21:20.862299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:21:20.862344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:21:20.862384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:21:20.862436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:21:20.862626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2024-11-21T23:21:20.862721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2024-11-21T23:21:20.862808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2024-11-21T23:21:20.862887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2024-11-21T23:21:20.863047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:21:20.863112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:21:20.863147Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:21:20.863349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:21:20.863395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:21:20.863427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:21:20.863625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:21:20.863682Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2 ... 0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:2;records_count:53332;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.748353Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:21:28.748488Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26668; 2024-11-21T23:21:28.748553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213344;num_rows=26668;batch_columns=saved_at; 2024-11-21T23:21:28.748811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:544:2539] send ScanData to [1:543:2538] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213344 rows: 26668 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T23:21:28.748974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.749103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.749140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:21:28.749185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 
2024-11-21T23:21:28.749314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:21:28.749395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:26664;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.749423Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:21:28.749508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=26664; 2024-11-21T23:21:28.749550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=213312;num_rows=26664;batch_columns=saved_at; 2024-11-21T23:21:28.749722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:544:2539] send ScanData to [1:543:2538] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 213312 rows: 26664 page faults: 0 finished: 0 pageFault: 0 arrow schema: saved_at: timestamp[us] 2024-11-21T23:21:28.749803Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.749884Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.749986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.750085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:21:28.750153Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.750213Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2024-11-21T23:21:28.750245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:544:2539] finished for tablet 9437184 2024-11-21T23:21:28.750316Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:544:2539] send ScanData to [1:543:2538] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:21:28.751052Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:544:2539] and sent to [1:543:2538] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_task_result"],"t":0.263},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.266}],"full":{"a":1732231288483600,"name":"_full_task","f":1732231288483600,"d_finished":0,"c":0,"l":1732231288750360,"d":266760},"events":[{"name":"bootstrap","f":1732231288483809,"d_finished":5404,"c":1,"l":1732231288489213,"d":5404},{"a":1732231288750069,"name":"ack","f":1732231288747039,"d_finished":2785,"c":3,"l":1732231288750011,"d":3076},{"a":1732231288750059,"name":"processing","f":1732231288490972,"d_finished":178987,"c":27,"l":1732231288750013,"d":179288},{"name":"ProduceResults","f":1732231288485875,"d_finished":10655,"c":32,"l":1732231288750228,"d":10655},{"a":1732231288750230,"name":"Finish","f":1732231288750230,"d_finished":0,"c":0,"l":1732231288750360,"d":130},{"name":"task_result","f":1732231288491006,"d_finished":175661,"c":24,"l":1732231288746897,"d":175661}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;) 2024-11-21T23:21:28.751143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:21:28.482559Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4997532;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4997532;selected_rows=0; 2024-11-21T23:21:28.751189Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:21:28.751358Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.146567s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.263037s;size=0.002681008;details={columns=1,2,3,4,9,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::LAST_PK;duration=0.026586s;size=0.00311667;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2024-11-21T23:21:28.751457Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:544:2539];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> LabeledDbCounters::OneTabletRestart [GOOD] |87.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCH10-StreamLookupJoin+ColumnStore [GOOD] >> TColumnShardTestSchema::OneTier >> KqpJoinOrder::TPCDS9_SMALL-StreamLookupJoin-ColumnStore |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |87.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [LD] {RESULT} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 22454, MsgBus: 27684 2024-11-21T23:21:18.544332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876688819483720:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:18.544446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001b5e/r3tmp/tmpX9kqS9/pdisk_1.dat 2024-11-21T23:21:19.592540Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:19.655392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:19.655478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:19.655731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:21:19.703094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22454, node 1 2024-11-21T23:21:20.864481Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:20.864524Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:20.864538Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:20.864630Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27684 TClient is connected to server localhost:27684 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2024-11-21T23:21:23.592721Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876688819483720:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:23.592995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:23.731602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:23.939594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:24.303785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:24.522548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:24.631475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:25.212065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876718884256324:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:25.224831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:27.448836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:27.493503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:27.548897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:27.597826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:27.665021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:27.778883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:27.996740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876727474191495:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:27.996824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:27.997189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876727474191500:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:28.014103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:28.047966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876727474191502:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:30.734613Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231290720, txId: 281474976710671] shutting down >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-StreamLookupJoin+ColumnStore |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut_kqp/unittest >> LabeledDbCounters::OneTabletRestart [GOOD] Test command err: 2024-11-21T23:15:42.147211Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439875245024324738:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:42.147269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001910/r3tmp/tmpmH0qrX/pdisk_1.dat 2024-11-21T23:15:43.146514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:15:43.582500Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:43.607984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:43.608070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:43.618887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21203, node 1 2024-11-21T23:15:43.924791Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:15:43.924813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:15:43.924818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:15:43.924907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:15:44.774297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:15:44.863749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:15:45.014551Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [1:7439875240729357146:2070] 2024-11-21T23:15:45.022861Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7439875257622201111:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:45.118522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:15:45.118615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:15:45.135175Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2024-11-21T23:15:45.137498Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:15:45.139186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:15:45.287832Z node 3 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor 2024-11-21T23:15:45.287911Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute 2024-11-21T23:15:45.347117Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [3:7439875249032266321:2061] 2024-11-21T23:15:45.461106Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7439875257622200972:2056], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2 2024-11-21T23:15:45.461821Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete 2024-11-21T23:15:45.461866Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute 2024-11-21T23:15:45.469799Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0 2024-11-21T23:15:45.469854Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0 2024-11-21T23:15:45.469895Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0 2024-11-21T23:15:45.469980Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0 2024-11-21T23:15:45.470028Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0 2024-11-21T23:15:45.470086Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0 2024-11-21T23:15:45.470124Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0 2024-11-21T23:15:45.470156Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0 2024-11-21T23:15:45.473430Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0 2024-11-21T23:15:45.486528Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, 
result count# 0 2024-11-21T23:15:45.486586Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0 2024-11-21T23:15:45.486619Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0 2024-11-21T23:15:45.486663Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0 2024-11-21T23:15:45.486705Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0 2024-11-21T23:15:45.486750Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0 2024-11-21T23:15:45.486795Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0 2024-11-21T23:15:45.486832Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0 2024-11-21T23:15:45.486864Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0 2024-11-21T23:15:45.487018Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2024-11-21T23:15:45.000000Z 2024-11-21T23:15:45.487166Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped 2024-11-21T23:15:45.487295Z node 3 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:15:45.513911Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [1:7439875240729357146:2070] 2024-11-21T23:15:45.516028Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7439875257622200972:2056], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Database1 2024-11-21T23:15:45.522058Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7439875257622200972:2056], database# /Root/Database1, no sysview processor 2024-11-21T23:15:45.550901Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete 2024-11-21T23:15:45.559590Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [2:7439875244741662179:2061] 2024-11-21T23:15:45.566631Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute 2024-11-21T23:15:45.566737Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2024-11-21T23:15:45.000000Z, query count# 0 2024-11-21T23:15:45.566782Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T23:15:45.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:45.566803Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T23:15:45.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:45.566818Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T23:15:45.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:45.566840Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T23:15:45.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:45.566863Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:45.566883Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:45.566901Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 
2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:45.566922Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:15:45.577877Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Database1 2024-11-21T23:15:45.585309Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete 2024-11-21T23:15:45.585383Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [3:7439875257622200972:2056] 2024-11-21T23:15:45.596537Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7439875257622200972:2056], database# /Root/Database1, no sysview processor 2024-11-21T23:15:45.597612Z node 3 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893 2024-11-21T23:15:45.600080Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete 2024-11-21T23:15:45.614593Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [1:7439875245024324536:2055] 2024-11-21T23:15:45.660968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2024-11-21T23:15:45.666956Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [3:7439875257622200972:2056] waiting... 2024-11-21T23:15:45.726523Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [1:7439875245024324536:2055] 2024-11-21T23:15:45.814848Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439875261921531630:2256];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:15:45.814899Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_b ... 
ters: service id# [12:7439875946959237252:2127] 2024-11-21T23:21:25.294712Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [10:7439875940582597707:2070] 2024-11-21T23:21:25.322300Z node 11 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables 2024-11-21T23:21:25.466604Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [12:7439875942664269824:2061] 2024-11-21T23:21:25.509317Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Execute 2024-11-21T23:21:25.509412Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryResults: interval end# 2024-11-21T23:21:25.000000Z, query count# 0 2024-11-21T23:21:25.509448Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T23:21:25.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:25.509504Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T23:21:25.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:25.509530Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T23:21:25.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:25.509568Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T23:21:25.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:25.509599Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 9, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:25.509621Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 11, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:25.509659Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 13, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:25.509687Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 15, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:25.511077Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [11:7439875937250926240:2061] 2024-11-21T23:21:25.526709Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Complete 2024-11-21T23:21:25.538724Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [12:7439875946959237252:2127] 2024-11-21T23:21:25.539786Z node 12 :SYSTEM_VIEWS DEBUG: Send counters: service id# [12:7439875946959237252:2127], processor id# 72075186224037894, database# /Root/PQ, generation# 13938018179328787256, node id# 12, is retrying# 0, is labeled# 0 2024-11-21T23:21:25.544884Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvSendDbCountersRequest: node id# 12, generation# 13938018179328787256, services count# 1, request size# 293 2024-11-21T23:21:25.545841Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvSendDbCountersResponse: service id# [12:7439875946959237252:2127], database# /Root/PQ, generation# 13938018179328787256 2024-11-21T23:21:25.564797Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [11:7439875945840860894:2056] 2024-11-21T23:21:25.565799Z node 12 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [12:7439875942664269824:2061] 2024-11-21T23:21:25.586912Z node 10 :SYSTEM_VIEWS 
DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [10:7439875944877565127:2110] 2024-11-21T23:21:25.587281Z node 10 :SYSTEM_VIEWS DEBUG: Send counters: service id# [10:7439875944877565127:2110], processor id# 72075186224037894, database# /Root/PQ, generation# 18211726538641722040, node id# 10, is retrying# 0, is labeled# 1 2024-11-21T23:21:25.594718Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvSendDbLabeledCountersRequest: node id# 10, generation# 18211726538641722040, request size# 53 2024-11-21T23:21:25.595172Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvSendDbLabeledCountersResponse: service id# [10:7439875944877565127:2110], database# /Root/PQ, generation# 18211726538641722040 2024-11-21T23:21:25.686815Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [10:7439875940582597707:2070] 2024-11-21T23:21:25.738754Z node 10 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [10:7439875944877565127:2110] 2024-11-21T23:21:25.746729Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439875944877565127:2110], database# /Root, no sysview processor 2024-11-21T23:21:25.862628Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [11:7439875937250926240:2061] 2024-11-21T23:21:25.971126Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvApplyLabeledCounters: services count# 1 2024-11-21T23:21:26.082537Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Execute 2024-11-21T23:21:26.082610Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 17, partition interval end# 2024-11-21T23:21:26.000000Z, partition count# 0 2024-11-21T23:21:26.082638Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistPartitionTopResults: table id# 18, partition interval end# 2024-11-22T00:00:00.000000Z, partition count# 0 2024-11-21T23:21:26.082757Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] Reset: interval end# 2024-11-21T23:21:26.000000Z 2024-11-21T23:21:26.083189Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [11:7439875945840860894:2056] 2024-11-21T23:21:26.084152Z node 11 :SYSTEM_VIEWS DEBUG: Send counters: service id# [11:7439875945840860894:2056], processor id# 72075186224037894, database# /Root/PQ, generation# 9399813003341038946, node id# 11, is retrying# 0, is labeled# 0 2024-11-21T23:21:26.085740Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvSendDbCountersRequest: node id# 11, generation# 9399813003341038946, services count# 1, request size# 596 2024-11-21T23:21:26.085851Z node 11 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvSendDbCountersResponse: service id# [11:7439875945840860894:2056], database# /Root/PQ, generation# 9399813003341038946 2024-11-21T23:21:26.100588Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TEvApplyCounters: services count# 1 2024-11-21T23:21:26.100687Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxCollect::Complete 2024-11-21T23:21:26.323167Z node 11 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded: no tables iteration 6
name=topic.partition.alive_count: 32
name=topic.partition.init_duration_milliseconds_max: 146
name=topic.partition.producers_count_max: 0
name=topic.partition.read.inflight_throttled_microseconds_max: 0
name=topic.partition.read.speed_limit_bytes_per_second: 0
name=topic.partition.read.throttled_microseconds_max: 0
name=topic.partition.read_without_consumer.speed_limit_bytes_per_second: 0
name=topic.partition.read_without_consumer.throttled_microseconds_max: 0
name=topic.partition.storage_bytes_max: 0
name=topic.partition.total_count: 32
name=topic.partition.uptime_milliseconds_min: 59268
name=topic.partition.write.bytes_per_day_max: 0
name=topic.partition.write.bytes_per_hour_max: 0
name=topic.partition.write.bytes_per_minute_max: 0
name=topic.partition.write.idle_milliseconds_max: 59311
name=topic.partition.write.lag_milliseconds_max: 0
name=topic.partition.write.speed_limit_bytes_per_second: 50000000
name=topic.partition.write.throttled_microseconds_max: 0
name=topic.producers_count: 0
name=topic.reserve.limit_bytes: 0
name=topic.reserve.used_bytes: 0
name=topic.storage_bytes: 0
consumer=user:
    name=topic.partition.alive_count: 32
    name=topic.partition.committed_end_to_end_lag_milliseconds_max: 9152
    name=topic.partition.committed_lag_messages_max: 0
    name=topic.partition.committed_read_lag_milliseconds_max: 9152
    name=topic.partition.end_to_end_lag_milliseconds_max: 50055
    name=topic.partition.read.idle_milliseconds_max: 59196
    name=topic.partition.read.lag_messages_max: 0
    name=topic.partition.read.lag_milliseconds_max: 0
    name=topic.partition.read.speed_limit_bytes_per_second: 0
    name=topic.partition.read.throttled_microseconds_max: 0
    name=topic.partition.write.lag_milliseconds_max: 0
    name=topic.read.lag_messages: 0
CHECK COUNTER topic.partition.uptime_milliseconds_min wait less than 60200 got 59268 CHECK COUNTER topic.partition.alive_count wait 32 got 32 2024-11-21T23:21:26.570727Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] TTxAggregate::Execute 2024-11-21T23:21:26.570808Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryResults: interval end# 2024-11-21T23:21:26.000000Z, query count# 0 2024-11-21T23:21:26.570839Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 8, interval end# 2024-11-21T23:21:26.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:26.570877Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 10, interval end# 2024-11-21T23:21:26.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:26.570902Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 12, interval end# 2024-11-21T23:21:26.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:26.570927Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 14, interval end# 2024-11-21T23:21:26.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:26.570955Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 9, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:26.570986Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 11, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:26.571012Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 13, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:26.571046Z node 11 :SYSTEM_VIEWS DEBUG: [72075186224037894] PersistQueryTopResults: table id# 15, interval end# 2024-11-22T00:00:00.000000Z, query count# 0, persisted# 0 2024-11-21T23:21:26.615686Z node 10 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [10:7439875944877565127:2110], processor id# 72075186224037894, database# /Root/PQ 2024-11-21T23:21:26.618731Z node 12 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [12:7439875946959237252:2127], processor id# 72075186224037894, database# /Root/PQ 2024-11-21T23:21:26.620655Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2024-11-21T23:21:26.631515Z node 12 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [12:7439875946959237252:2127], database# /Root/PQ, processor id# 72075186224037894 2024-11-21T23:21:26.656575Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:21:26.667075Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2024-11-21T23:21:26.667728Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:21:26.731976Z node 11 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [11:7439875945840860894:2056], processor id# 72075186224037894, database# /Root/PQ 2024-11-21T23:21:26.732184Z node 11 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [11:7439875945840860894:2056], database# /Root/PQ, processor id# 72075186224037894 2024-11-21T23:21:26.878895Z node 10 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [10:7439875944877565127:2110], database# /Root/PQ, processor id# 72075186224037894 |87.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/cost/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |87.9%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] >> KqpCost::Range >> KqpCost::ScanScriptingRangeFullScan-SourceRead |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:339;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=1;before_size=0;after_size=33376;before_rows=0;after_rows=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=33376;portion_bytes=33376;portion_raw_bytes=53670; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=2;before_size=33376;after_size=70288;before_rows=1000;after_rows=2000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36912;portion_bytes=36912;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8016;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=3;before_size=70288;after_size=107232;before_rows=2000;after_rows=3000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36944;portion_bytes=36944;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=4;before_size=107232;after_size=144208;before_rows=3000;after_rows=4000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=36976;portion_bytes=36976;portion_raw_bytes=57000; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:17;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:103;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:103;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:103;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:103;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:45;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=5;before_size=144208;after_size=181232;before_rows=4000;after_rows=5000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37024;portion_bytes=37024;portion_raw_bytes=57000; FALLBACK_ACTOR_LOGGING;priority=TRACE;componen ... 
iority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=84;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=85;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=86;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=87;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=88;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37424;portion_bytes=37424;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=89;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37424;portion_bytes=37424;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=90;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=91;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=92;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=93;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=94;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=95;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=96;before_size=0;after_size=0;before_rows=0;after_rows=0; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=97;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=98;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37616;portion_bytes=37616;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=99;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37624;portion_bytes=37624;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37392;portion_bytes=37392;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=100;before_size=0;after_size=0;before_rows=0;after_rows=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37392;portion_bytes=37392;portion_raw_bytes=60000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1624136;portion_bytes=1624136;portion_raw_bytes=2538150; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=101;before_size=0;after_size=1624136;before_rows=0;after_rows=42858; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1624136;portion_bytes=1624136;portion_raw_bytes=2538150; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1688792;portion_bytes=1688792;portion_raw_bytes=2571480; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=102;before_size=1624136;after_size=3312928;before_rows=42858;after_rows=85716; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=1688792;portion_bytes=1688792;portion_raw_bytes=2571480; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=560112;portion_bytes=560112;portion_raw_bytes=857040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=103;before_size=3312928;after_size=3873040;before_rows=85716;after_rows=100000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=560112;portion_bytes=560112;portion_raw_bytes=857040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=64000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=104;before_size=3873040;after_size=3910688;before_rows=100000;after_rows=101000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37648;portion_bytes=37648;portion_raw_bytes=64000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37656;portion_bytes=37656;portion_raw_bytes=64000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:70;event=portion_stats_updated;type=ADD;path_id=1;portion=105;before_size=3910688;after_size=3948344;before_rows=101000;after_rows=102000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine_logs.cpp:112;event=update_portion;blobs_size=37656;portion_bytes=37656;portion_raw_bytes=64000; |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpJoin::FullOuterJoin2 [GOOD] >> KqpJoin::FullOuterJoinSizeCheck >> KqpJoinOrder::FiveWayJoinStatsOverride-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds+StreamLookupJoin-ColumnStore |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup >> KqpJoinOrder::TestJoinHint1-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |87.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |88.0%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |88.0%| [TA] $(B)/ydb/core/sys_view/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::OneTier [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerLegacy >> KqpJoinOrder::TPCDS16+StreamLookupJoin-ColumnStore [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> KqpCost::Range [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> KqpJoinOrder::TPCDS16-StreamLookupJoin+ColumnStore >> Viewer::JsonStorageListingV1PDiskIdFilter >> KqpJoinOrder::TPCH11-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCH21+StreamLookupJoin-ColumnStore >> KqpCost::ScanQueryRangeFullScan-SourceRead >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> OlapEstimationRowsCorrectness::TPCH11 >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli >> KqpJoinOrder::FiveWayJoin-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |88.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |88.0%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.0%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTier [GOOD] Test command err: 2024-11-21T23:21:32.597735Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:21:32.696348Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvBoot 2024-11-21T23:21:32.702013Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvRestored 2024-11-21T23:21:32.702487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:21:32.726015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:21:32.726115Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:21:32.727615Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:21:32.736295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:21:32.736552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:21:32.736800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:21:32.736926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:21:32.737039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:21:32.737141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:21:32.737294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:21:32.737416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:21:32.737519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:21:32.737661Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:21:32.737804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:21:32.737936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:21:32.769793Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:100:2134], Recipient [1:136:2168]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2024-11-21T23:21:32.770142Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 2146435086, Sender [1:136:2168], Recipient [1:136:2168]: NKikimr::NColumnShard::TEvPrivate::TEvTieringModified 2024-11-21T23:21:32.770219Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:21:32.770298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:21:32.774160Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:21:32.774442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:21:32.774499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:21:32.774665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:32.774843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:21:32.774920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:21:32.774961Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:21:32.775088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:21:32.775187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:21:32.775233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:21:32.775265Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:21:32.775443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:32.775509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:21:32.775551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:21:32.775601Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:21:32.775729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:21:32.775794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:21:32.775855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:21:32.775888Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:21:32.775962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:21:32.776003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:21:32.776030Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:21:32.776075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:21:32.776114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:21:32.776140Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:21:32.776317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=49; 2024-11-21T23:21:32.776412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2024-11-21T23:21:32.776501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2024-11-21T23:21:32.776634Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=84; 2024-11-21T23:21:32.776796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:21:32.776867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:21:32.776906Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:21:32.777117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:21:32.777172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:21:32.777211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:21:32.777375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:21:32.777449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2 ... 
4-11-21T23:21:39.764538Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2024-11-21T23:21:39.764710Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:39.764761Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=0; 2024-11-21T23:21:39.764808Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:21:39.765393Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:114;event=TEvTaskProcessedResult; 2024-11-21T23:21:39.765448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=merge.cpp:58;event=DoApply;interval_idx=0; 2024-11-21T23:21:39.765508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:20;event=interval_result_received;interval_idx=0;intervalId=2; 2024-11-21T23:21:39.765569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:44;event=interval_result;interval_idx=0;count=80000;merger=0;interval_id=2; 2024-11-21T23:21:39.765637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=scanner.cpp:62;event=intervals_finished; 2024-11-21T23:21:39.765756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:39.765797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=1;count=80000;finished=1; 2024-11-21T23:21:39.765839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:203;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2024-11-21T23:21:39.766070Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:21:39.766290Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:39.766352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=plain_read_data.cpp:73;event=DoExtractReadyResults;result=0;count=0;finished=1; 2024-11-21T23:21:39.766514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:234;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=80000; 2024-11-21T23:21:39.766590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:254;stage=data_format;batch_size=640000;num_rows=80000;batch_columns=timestamp; 2024-11-21T23:21:39.766733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:356:2367] send ScanData to [1:351:2362] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 640000 rows: 80000 page faults: 0 finished: 0 pageFault: 0 arrow schema: timestamp: timestamp[us] 2024-11-21T23:21:39.766889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:269;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:39.767021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:39.767147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:39.767345Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:21:39.767477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:39.767607Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:39.767652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:356:2367] finished for tablet 9437184 2024-11-21T23:21:39.767763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:356:2367] send ScanData to [1:351:2362] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:21:39.768319Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:356:2367] and sent to [1:351:2362] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.178},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.18}],"full":{"a":1732231299587293,"name":"_full_task","f":1732231299587293,"d_finished":0,"c":0,"l":1732231299767823,"d":180530},"events":[{"name":"bootstrap","f":1732231299588089,"d_finished":4439,"c":1,"l":1732231299592528,"d":4439},{"a":1732231299767314,"name":"ack","f":1732231299766041,"d_finished":1134,"c":1,"l":1732231299767175,"d":1643},{"a":1732231299767290,"name":"processing","f":1732231299593856,"d_finished":8989,"c":5,"l":1732231299767177,"d":9522},{"name":"ProduceResults","f":1732231299590852,"d_finished":7249,"c":8,"l":1732231299767637,"d":7249},{"a":1732231299767639,"name":"Finish","f":1732231299767639,"d_finished":0,"c":0,"l":1732231299767823,"d":184},{"name":"task_result","f":1732231299593881,"d_finished":7715,"c":4,"l":1732231299765902,"d":7715}],"id":"9437184::3"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) 2024-11-21T23:21:39.768450Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:21:39.585918Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=6315200;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=6315200;selected_rows=0; 2024-11-21T23:21:39.768508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:21:39.768674Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.171811s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.164758s;size=0.0063152;details={columns=1;};};]};; 2024-11-21T23:21:39.768787Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:356:2367];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:97;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:64;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 64400, MsgBus: 16483 2024-11-21T23:21:35.497508Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876761404165737:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:35.498029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001b44/r3tmp/tmpv9S2Ps/pdisk_1.dat 2024-11-21T23:21:35.979531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:35.979625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:35.981713Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:35.985023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64400, node 1 2024-11-21T23:21:36.066914Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:36.066938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:36.066944Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:36.067049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16483 TClient is connected to server localhost:16483 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:36.727544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:36.761478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:36.915342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:21:37.072265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:21:37.170618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:39.208730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876778584036634:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:39.208808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:39.465017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:39.535176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:39.647311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:39.761251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:39.820509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:39.918240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.015257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876782879004439:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.015355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.015569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876782879004444:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.021107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:40.041776Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:21:40.042459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876782879004446:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:40.495442Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876761404165737:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:40.495521Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |88.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 10387, MsgBus: 31922 2024-11-21T23:21:35.895465Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876764444622984:2148];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001b33/r3tmp/tmp6YF73p/pdisk_1.dat 2024-11-21T23:21:36.259198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:21:36.534872Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:36.558729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:36.558851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:36.564946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10387, node 1 2024-11-21T23:21:36.726942Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:36.726972Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:36.726980Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:36.727054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31922 TClient is connected to server localhost:31922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:21:37.902051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:37.937310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:38.133326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:38.378504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:38.459696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:41.063119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876785919461057:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.063272Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876764444622984:2148];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:41.079562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.079643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:41.116545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.169733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.218222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.281033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.328940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.391343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.451869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876790214428853:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.451937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.452284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876790214428858:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.456176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:41.469026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876790214428860:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:43.255062Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231303180, txId: 281474976710671] shutting down |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] >> BasicUsage::WriteSessionSwitchDatabases [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15890, MsgBus: 9454 2024-11-21T23:21:38.713869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876776845959057:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:38.713923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001b19/r3tmp/tmpYLICDq/pdisk_1.dat 2024-11-21T23:21:39.360877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:39.360969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:39.364217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15890, node 1 2024-11-21T23:21:39.456658Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:39.490560Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:21:39.494634Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2024-11-21T23:21:39.590031Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:39.590059Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:39.590065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:39.590150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9454 TClient is connected to server localhost:9454 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:21:40.220431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:40.236446Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:21:40.241138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:40.423868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:40.610101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:40.722455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:43.322141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876794025829894:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:43.353451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:43.461472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.550476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.636626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.688396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.730370Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876776845959057:2119];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:43.730475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:43.763026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.821758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.970654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876798320797704:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:43.970745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:43.970983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876798320797709:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:43.975598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:43.998714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876798320797711:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |88.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants >> KqpCost::RangeFullScan >> KqpJoin::FullOuterJoinSizeCheck [GOOD] >> KqpJoin::FullOuterJoinNotNullJoinKey |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 32264, MsgBus: 18961 2024-11-21T23:20:47.646796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876555055419123:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000dfb/r3tmp/tmps6oY0I/pdisk_1.dat 2024-11-21T23:20:48.092084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:20:48.429153Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:48.435753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:48.435843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:48.444662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32264, node 1 2024-11-21T23:20:48.768408Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:48.768432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:48.768439Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:48.768550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18961 TClient is connected to server localhost:18961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 
2024-11-21T23:20:50.116278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:50.135375Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:50.149335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:50.396018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:20:50.664420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:20:50.767104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:52.619271Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876555055419123:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:52.619340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:53.319631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876580825224476:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:53.319766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:53.678222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.729632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.782088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.837314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.879690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:53.932096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:54.009930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876585120192271:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:54.010008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:54.010868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876585120192276:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:54.014938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:54.031380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876585120192278:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:55.598792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:55.691206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:55.758701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:55.806312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:55.862792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:55.955622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17261, MsgBus: 18050 2024-11-21T23:20:59.288661Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876607148524998:2201];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000dfb/r3tmp/tmpO0BUs4/pdisk_1.dat 2024-11-21T23:20:59.368615Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:20:59.491219Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:59.491571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:59.491656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:59.494174Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17261, node 2 2024-11-21T23:20:59.694980Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:59.695004Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:59.695010Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:59.695118Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18050 TClient is connected to server localhost:18050 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:00.431171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo ... :0, at schemeshard: 72057594046644480 2024-11-21T23:21:30.511406Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:30.612923Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:30.672846Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:30.784963Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876743535058094:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:30.785107Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:30.786690Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876743535058099:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:30.791590Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:30.816803Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876743535058101:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:32.221715Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:32.281908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:32.325927Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:21:32.376352Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:21:32.419861Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:21:32.461126Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63173, MsgBus: 15042 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000dfb/r3tmp/tmpx8ENOJ/pdisk_1.dat 2024-11-21T23:21:35.178534Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:21:35.199366Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:35.275948Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:35.280811Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:35.295490Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63173, node 5 2024-11-21T23:21:35.587031Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:35.587056Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:35.587066Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:35.587220Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15042 TClient is connected to server localhost:15042 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:36.553471Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:36.573212Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:21:36.589895Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:36.712971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:36.995377Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:37.092315Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:40.638088Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876786799224409:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.638196Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.668537Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.760126Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.807085Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.894833Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.938592Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.023893Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.137482Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876791094192212:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.137608Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.138013Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876791094192218:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.143226Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:41.157595Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876791094192220:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:21:42.885213Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.933199Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.998153Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.096568Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.156823Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.246842Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/client/ydb_federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2024-11-21T23:19:07.170469Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1732231147170437 2024-11-21T23:19:07.547222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876127579063730:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:07.551414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:07.660918Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876125964490974:2097];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:07.885914Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache created 2024-11-21T23:19:07.904971Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:07.925555Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0012f3/r3tmp/tmpk6V84r/pdisk_1.dat 2024-11-21T23:19:08.327875Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:08.334777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:08.334887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:08.366661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:08.366734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:08.370250Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:08.373363Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:19:08.374268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18233, node 1 2024-11-21T23:19:08.733530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/dl5p/0012f3/r3tmp/yandex9ZLPIP.tmp 2024-11-21T23:19:08.733551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/dl5p/0012f3/r3tmp/yandex9ZLPIP.tmp 2024-11-21T23:19:08.733697Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/dl5p/0012f3/r3tmp/yandex9ZLPIP.tmp 2024-11-21T23:19:08.733800Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:19:08.778842Z INFO: TTestServer started on Port 64124 GrpcPort 18233 TClient is connected to server localhost:64124 PQClient connected to localhost:18233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:09.170954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2024-11-21T23:19:09.314831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:12.007495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876143144360397:2281], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.007715Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.011195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7439876147439327726:2285], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.042959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2024-11-21T23:19:12.051598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876149053901047:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.055534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.062743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876149053901085:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:12.309983Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7439876147439327729:2286], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2024-11-21T23:19:12.761178Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7439876125964490974:2097];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:12.761245Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:12.854820Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876127579063730:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:12.855104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:12.864290Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7439876149053901164:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:19:12.863015Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7439876147439327764:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2024-11-21T23:19:12.866713Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzE0ZDJjYmMtNTE4NDQxYTEtYjU4NjkzZmUtZTEwNTM3NjQ=, ActorId: [2:7439876143144360392:2279], ActorState: ExecuteState, TraceId: 01jd8gbgbye8q44hws641zkryw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:19:12.868861Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:19:12.891936Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjAwNjk0N2EtYmMxZWVlNGItZGEzMzZhMmUtNmE0MTQ5NWU=, ActorId: [1:7439876149053901042:2301], ActorState: ExecuteState, TraceId: 01jd8gbgdg3ez31vy4s4x41ed1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2024-11-21T23:19:12.892477Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2024-11-21T23:19:12.896866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.295764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:13.491315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:18233", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2024-11-21T23:19:13.830240Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jd8gbhz44qpbz03gc3h8yxp0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWFjM2ZlOTQtNjkzNWVjZjItNDU5MTg0MzEtOWEzMDhlY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7439876157643836209:3001] === CheckClustersList. 
Ok 2024-11-21T23:19:19.545316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28 ... > 2024-11-21T23:21:24.758911Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:21:24.759005Z node 4 :PERSQUEUE INFO: new Cookie src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src_id 2024-11-21T23:21:24.759124Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2024-11-21T23:21:24.759182Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:21:24.763186Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:21:24.763240Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:21:24.763428Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2024-11-21T23:21:24.765012Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0 2024-11-21T23:21:24.768542Z :DEBUG: [/Root] SessionId [] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:21:24.768616Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1732231284768 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2024-11-21T23:21:24.768731Z :INFO: [/Root] SessionId [] MessageGroupId [src_id] Write session established. 
Init response: last_seq_no: 2 session_id: "src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0" supported_codecs { codecs: 1 codecs: 2 codecs: 3 } 2024-11-21T23:21:25.766670Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876717183456013:3351] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2024-11-21T23:21:25.793121Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876717183456013:3351] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2024-11-21T23:21:25.793158Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7439876717183456013:3351] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2024-11-21T23:21:28.402896Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:21:33.406429Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:21:38.409486Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:21:43.193241Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2024-11-21T23:21:43.193300Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2024-11-21T23:21:43.200043Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2024-11-21T23:21:43.200180Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 2024-11-21T23:21:43.198834Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPersQueue::TEvStatus 2024-11-21T23:21:43.199140Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2024-11-21T23:21:43.410859Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvPartitionCounters PartitionId 0 2024-11-21T23:21:44.911273Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent 2024-11-21T23:21:44.912162Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session: try to update token 2024-11-21T23:21:44.912209Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 >>> Ready to answer: ok 2024-11-21T23:21:44.923124Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:21:44.914371Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0 grpc read done: success: 1 data: write_request[data omitted] 2024-11-21T23:21:44.923179Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:21:44.923284Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2024-11-21T23:21:44.924207Z node 4 :PERSQUEUE 
DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2024-11-21T23:21:44.924229Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2024-11-21T23:21:44.924295Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2024-11-21T23:21:44.924518Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2024-11-21T23:21:44.928716Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2024-11-21T23:21:44.929383Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1732231304927 2024-11-21T23:21:44.929597Z node 4 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2024-11-21T23:21:44.914697Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2024-11-21T23:21:44.923710Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T23:21:44.936087Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 2024-11-21T23:21:44.936163Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2024-11-21T23:21:44.936218Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2024-11-21T23:21:44.936441Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] topic 'rt3.dc1--test-topicCounters. CacheSize 0 CachedBlobs 0 2024-11-21T23:21:44.936475Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2024-11-21T23:21:44.937062Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2024-11-21T23:21:44.938076Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2024-11-21T23:21:44.938270Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 8000000 } min_queue_wait_time { nanos: 12000000 } max_queue_wait_time { nanos: 12000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2024-11-21T23:21:44.938300Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
2024-11-21T23:21:44.938325Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session: acknoledged message 1 2024-11-21T23:21:44.944941Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0 grpc read done: success: 0 data: 2024-11-21T23:21:44.946080Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2024-11-21T23:21:44.946129Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7439876717183456042:3351] destroyed 2024-11-21T23:21:44.946178Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2024-11-21T23:21:44.954846Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2024-11-21T23:21:44.955010Z :ERROR: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2024-11-21T23:21:44.955049Z :ERROR: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2024-11-21T23:21:44.955077Z :INFO: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session will now close 2024-11-21T23:21:44.955153Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session: aborting 2024-11-21T23:21:44.944975Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0 grpc read failed 2024-11-21T23:21:44.945002Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0 grpc closed 2024-11-21T23:21:44.945021Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0 is DEAD 2024-11-21T23:21:44.945535Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2024-11-21T23:21:44.997841Z :DEBUG: [/Root] SessionId [src_id|19cb3de9-ffa26989-4fd1b8f6-91eeca05_0] MessageGroupId [src_id] Write session: destroy 2024-11-21T23:21:45.754887Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876807377770322:3546], TxId: 281474976715891, task: 1, CA Id [3:7439876807377770320:3546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2024-11-21T23:21:45.795179Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876807377770322:3546], TxId: 281474976715891, task: 1, CA Id [3:7439876807377770320:3546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:21:45.887303Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876807377770322:3546], TxId: 281474976715891, task: 1, CA Id [3:7439876807377770320:3546]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2024-11-21T23:21:45.962520Z node 3 :KQP_COMPUTE WARN: SelfId: [3:7439876807377770322:3546], TxId: 281474976715891, task: 1, CA Id [3:7439876807377770320:3546]. 
Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 24726, MsgBus: 18606 2024-11-21T23:21:41.408443Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876787684248473:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:41.408644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001ae0/r3tmp/tmp5AoGSD/pdisk_1.dat 2024-11-21T23:21:41.923676Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24726, node 1 2024-11-21T23:21:41.948937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:41.949043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:41.954916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:21:42.065268Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:42.065295Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:42.065308Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:42.065409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18606 TClient is connected to server localhost:18606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:42.666448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:42.703083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:42.881048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:21:43.048863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:43.124251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:44.812458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876800569152060:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:44.812547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:45.182473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.235919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.347491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.417075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.476859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.572142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.681672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876804864119857:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:45.681751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:45.682000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876804864119862:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:45.685934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:45.708167Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:21:45.709408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876804864119864:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:46.431940Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876787684248473:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:46.432141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TColumnShardTestSchema::HotTiersTtl [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH2-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2024-11-21T23:19:50.261429Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:19:51.006047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:19:51.048551Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:19:51.048665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:19:51.049052Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:19:51.073648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:51.074082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:51.074415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:51.074546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:51.074676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:19:51.074815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:19:51.074928Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:19:51.075050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:19:51.075181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:19:51.075303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:19:51.075412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:19:51.075536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:19:51.137233Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:19:51.137330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:19:51.154492Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:19:51.154824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:19:51.154883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:19:51.155084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:51.159410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:19:51.159650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:19:51.159723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:19:51.159884Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:19:51.160032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:19:51.160086Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:19:51.160123Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:19:51.160368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:19:51.160481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:19:51.160529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:19:51.160569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:19:51.160696Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:19:51.160762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:19:51.160823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:19:51.160860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:19:51.160941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:19:51.160978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:19:51.161010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:19:51.161064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:19:51.161103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:19:51.161160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:19:51.161368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=68; 2024-11-21T23:19:51.161461Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2024-11-21T23:19:51.161563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2024-11-21T23:19:51.161683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=62; 2024-11-21T23:19:51.161898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:19:51.161956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:19:51.162000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:19:51.162222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:19:51.162268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:19:51.162298Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:19:51.162466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2024-11-21T23:19:51.162524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2024-11-21T23:19:51.162561Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2024-11-21T23:19:51.162784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2024-11-21T23:19:51.162833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2024-11-21T23:19:51.162862Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execu ... 
oadingTime=16996; 2024-11-21T23:21:47.050630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=40741; 2024-11-21T23:21:47.050931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=219; 2024-11-21T23:21:47.052749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=215; 2024-11-21T23:21:47.052847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1859; 2024-11-21T23:21:47.053022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=112; 2024-11-21T23:21:47.053172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:21:47.053232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=160; 2024-11-21T23:21:47.053344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=64; 2024-11-21T23:21:47.053431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=45; 2024-11-21T23:21:47.054199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=717; 2024-11-21T23:21:47.055637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1365; 2024-11-21T23:21:47.055830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=122; 2024-11-21T23:21:47.056002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=113; 2024-11-21T23:21:47.056073Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2024-11-21T23:21:47.056137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2024-11-21T23:21:47.056200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2024-11-21T23:21:47.056337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=58; 
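The composite_init records above report a PRECHARGE and an EXECUTE timing for each load stage (insert_table, tx_controller, operations_manager, storages_manager, long_tx, db_locks, bg_sessions, sharing_sessions, in_flight_reads), and the record that follows closes the sequence with an overall EXECUTE:composite_initLoadingTime. A minimal parsing sketch for pulling these figures out of such a log chunk, assuming only the "<PHASE>:<stage>LoadingTime=<value>" key format visible in this excerpt (the time unit is not stated here, and un-prefixed keys such as TablesLoadingTime are ignored):

import re
from collections import defaultdict

# Illustrative only: the key format is inferred from this excerpt, not from YDB docs.
LOADING_RE = re.compile(r"(PRECHARGE|EXECUTE):(\w+)LoadingTime=(\d+)")

def loading_times(log_text: str) -> dict:
    """Group LoadingTime values by stage name, keyed by PRECHARGE/EXECUTE phase."""
    stages = defaultdict(dict)
    for phase, stage, value in LOADING_RE.findall(log_text):
        stages[stage][phase] = int(value)
    return dict(stages)

if __name__ == "__main__":
    sample = (
        "fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=219; "
        "fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=1859; "
        "fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=56908;"
    )
    for stage, phases in loading_times(sample).items():
        print(stage, phases)
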
2024-11-21T23:21:47.056403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2024-11-21T23:21:47.056532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=80; 2024-11-21T23:21:47.056589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2024-11-21T23:21:47.056681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2024-11-21T23:21:47.056722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=56908; 2024-11-21T23:21:47.056946Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=25;blobs=50;rows=708348;bytes=40210220;raw_bytes=73294644; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=22;blobs=44;rows=1136652;bytes=64354976;raw_bytes=117832608; inactive portions=44;blobs=88;rows=1246652;bytes=70721288;raw_bytes=129233400; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:21:47.057078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1673:3649];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:21:47.057152Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:21:47.057262Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=0;new_count_ttls=0; 2024-11-21T23:21:47.057453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1673:3649];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:21:47.057507Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:21:47.057591Z node 1 :TX_COLUMNSHARD DEBUG: fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:21:47.057641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:21:47.057718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:21:47.057960Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=21; 2024-11-21T23:21:47.058036Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T23:21:47.058103Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:21:47.058171Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:21:47.058216Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:21:47.058537Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:21:47.058673Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:21:47.059325Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:21:47.060331Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1706:3675];tablet_id=9437184;parent=[1:1673:3649];fline=manager.h:99;event=ask_data;request=request_id=281;1={portions_count=91};; 2024-11-21T23:21:47.061255Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:21:47.061345Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:1706:3675];tablet_id=9437184;parent=[1:1673:3649];fline=manager.h:99;event=ask_data;request=request_id=283;1={portions_count=47};; 2024-11-21T23:21:47.062938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:21:47.062984Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T23:21:47.063037Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:21:47.063093Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:21:47.063172Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:21:47.063407Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=21; 2024-11-21T23:21:47.063502Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2024-11-21T23:21:47.063554Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:21:47.063614Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:21:47.063696Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:21:47.063780Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:21:47.063897Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:21:47.064608Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=91;path_id=1; 2024-11-21T23:21:47.075403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=91;path_id=1; 2024-11-21T23:21:47.084782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:21:47.084897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:1673:3649];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 8949, MsgBus: 12389 2024-11-21T23:20:43.960626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876538286693837:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:43.961482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e22/r3tmp/tmpOBAkz0/pdisk_1.dat 2024-11-21T23:20:44.504843Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:44.508711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:44.514603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:44.519337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8949, node 1 2024-11-21T23:20:44.654920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:44.654944Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:44.654952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:44.655025Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12389 TClient is connected to server localhost:12389 WaitRootIsUp 'Root'... 
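An aside on the column-shard "Index: tables 1 inserted portions=...; compacted portions=...; s-compacted portions=...; inactive portions=...; evicted portions=..." summary printed above, just before the KqpJoin test output: it packs the per-class portion statistics into a single record. A rough sketch for splitting it into one dict per class, with the class names and field keys taken verbatim from that record and nothing else assumed:

import re

# Rough sketch: split the "Index: tables ..." summary into one dict per portion class.
CLASS_RE = re.compile(
    r"(inserted|compacted|s-compacted|inactive|evicted) portions=(\d+);blobs=(\d+);"
    r"rows=(\d+);bytes=(\d+);raw_bytes=(\d+)"
)

def parse_index_summary(record: str) -> dict:
    result = {}
    for name, portions, blobs, rows, nbytes, raw_bytes in CLASS_RE.findall(record):
        result[name] = {
            "portions": int(portions),
            "blobs": int(blobs),
            "rows": int(rows),
            "bytes": int(nbytes),
            "raw_bytes": int(raw_bytes),
        }
    return result

if __name__ == "__main__":
    sample = ("Index: tables 1 inserted portions=25;blobs=50;rows=708348;"
              "bytes=40210220;raw_bytes=73294644; s-compacted portions=22;blobs=44;"
              "rows=1136652;bytes=64354976;raw_bytes=117832608; at tablet 9437184")
    print(parse_index_summary(sample))
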
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:45.353672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:45.378820Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:45.399779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:45.637765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:45.849999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:46.006304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:49.055855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876559761531891:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:49.119874Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876538286693837:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:49.120219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:49.120312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:49.159159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:49.219921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:49.317433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:49.372331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:49.417798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:49.510805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:49.644145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876564056499688:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:49.644226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:49.644451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876564056499693:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:49.648212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:49.677110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876564056499695:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:51.402101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:51.427159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:51.509257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:51.554970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:51.611253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1746, MsgBus: 23323 2024-11-21T23:20:55.507569Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876590518454149:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:55.507630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e22/r3tmp/tmpfzn3bj/pdisk_1.dat 2024-11-21T23:20:55.732816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:55.732926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:55.734448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:20:55.736869Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1746, node 2 2024-11-21T23:20:55.862827Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:55.862851Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:55.862858Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:55.862940Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23323 TClient is connected to server localhost:23323 WaitRootIsUp 'Root'... 
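Each node bootstrap in these tests repeats the same warning sequence seen above: KQP_WORKLOAD_SERVICE fails to fetch the default resource pool (NOT_FOUND), the pool is then created via ESchemeOpCreateResourcePool, and TPoolCreatorActor schedules a "doublechecking" retry. When skimming a log like this, a per-node, per-component tally of WARN/ERROR records helps separate this repeating bootstrap pattern from anything unexpected. A sketch, assuming only the "node <N> :<COMPONENT> <LEVEL>:" prefix format seen in this excerpt:

import re
from collections import Counter

# Triage sketch: count WARN/ERROR records per (node, component).
# The record prefix is assumed from this excerpt; other YDB builds may differ.
RECORD_RE = re.compile(r"node (\d+) :(\w+) (WARN|ERROR):")

def tally(log_text: str) -> Counter:
    return Counter(RECORD_RE.findall(log_text))

if __name__ == "__main__":
    sample = (
        "2024-11-21T23:20:49.055855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] ... "
        "2024-11-21T23:20:49.119874Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59 ..."
    )
    for (node, component, level), count in tally(sample).most_common():
        print(f"node {node} {component} {level}: {count}")
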
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:56.452100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:56.471707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itse ... TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:27.118927Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:27.250593Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876730676123693:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:27.250745Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:27.254736Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876730676123698:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:27.265490Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:27.297706Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876730676123701:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:28.907862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:28.947073Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:28.993924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:21:29.050282Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:21:29.102620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9805, MsgBus: 5611 2024-11-21T23:21:34.221211Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876757850177375:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:34.365233Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e22/r3tmp/tmpco30Gb/pdisk_1.dat 2024-11-21T23:21:34.481249Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:34.523840Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:34.523965Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:34.528340Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9805, node 5 2024-11-21T23:21:34.723223Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:34.723249Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:34.723261Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:34.723406Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5611 TClient is connected to server localhost:5611 WaitRootIsUp 'Root'... 
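The FLAT_TX_SCHEMESHARD warning "Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOp..." repeats above for nearly every schema operation these tests issue. Counting it by suboperation type gives a compact picture of what a test created; a sketch, with the message text taken from this excerpt:

import re
from collections import Counter

# Sketch: tally the "undo unsafe" schemeshard warnings by ESchemeOp suboperation type.
SUBOP_RE = re.compile(r"undo unsafe, suboperation type: (ESchemeOp\w+)")

def count_subops(log_text: str) -> Counter:
    return Counter(SUBOP_RE.findall(log_text))

if __name__ == "__main__":
    sample = ("... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0 ... "
              "... undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3 ...")
    for subop, count in count_subops(sample).most_common():
        print(subop, count)
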
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:35.687398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:35.702818Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:21:35.721221Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:35.859909Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:36.117324Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:36.211256Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:39.165520Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876757850177375:2201];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:39.165599Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:40.047869Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876783619982707:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.048013Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.135701Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.205324Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.286036Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.352278Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.402948Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.491469Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:40.677909Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876783619983223:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.678019Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.678603Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876783619983228:2434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:40.684059Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:40.723067Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2024-11-21T23:21:40.724118Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876783619983230:2435], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:21:42.520187Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.585516Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.671429Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.737054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.793693Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231857.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231857.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231857.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231857.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231857.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112231857.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=132231857.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231857.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112230657.000000s;Name=;Codec=}; 2024-11-21T23:20:57.701120Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:20:57.813170Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:20:57.836385Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:20:57.836470Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:20:57.836710Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:20:57.844212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:20:57.844411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:20:57.844669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:20:57.844784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:20:57.844879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:20:57.844976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:20:57.845060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:20:57.845166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:20:57.845287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:20:57.845387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:20:57.845484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:20:57.845567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:20:57.867452Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:20:57.867533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:20:57.873228Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Complete at tablet 9437184 2024-11-21T23:20:57.873455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:20:57.873505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:20:57.873669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:20:57.873829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:20:57.873899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:20:57.873938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:20:57.874027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:20:57.874088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:20:57.874153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:20:57.874202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:20:57.874367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:20:57.874465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:20:57.874510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:20:57.874544Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:20:57.874626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:20:57.874672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:20:57.874713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:20:57.874741Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:20:57.874802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:20:57.874835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:20:57.874865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:20:57.874917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:20:57.874952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:20:57.874977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:20:57.875157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2024-11-21T23:20:57.875238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2024-11-21T23:20:57.875315Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2024-11-21T23:20:57.875391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2024-11-21T23:20:57.875535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:20:57.875589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:20:57.875622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:20:57.875871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abst ... 
timestamp;);;;); 2024-11-21T23:21:49.193101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:910:2916] finished for tablet 9437184 2024-11-21T23:21:49.193174Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:910:2916] send ScanData to [1:909:2915] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:21:49.193637Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:910:2916] and sent to [1:909:2915] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.01},{"events":["f_processing","f_task_result"],"t":0.011},{"events":["f_ack","l_task_result"],"t":0.343},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.347}],"full":{"a":1732231308845294,"name":"_full_task","f":1732231308845294,"d_finished":0,"c":0,"l":1732231309193221,"d":347927},"events":[{"name":"bootstrap","f":1732231308845511,"d_finished":9806,"c":1,"l":1732231308855317,"d":9806},{"a":1732231309192888,"name":"ack","f":1732231309188811,"d_finished":2954,"c":3,"l":1732231309192798,"d":3287},{"a":1732231309192875,"name":"processing","f":1732231308856674,"d_finished":217429,"c":24,"l":1732231309192801,"d":217775},{"name":"ProduceResults","f":1732231308847777,"d_finished":7812,"c":29,"l":1732231309193085,"d":7812},{"a":1732231309193087,"name":"Finish","f":1732231309193087,"d_finished":0,"c":0,"l":1732231309193221,"d":134},{"name":"task_result","f":1732231308856699,"d_finished":213888,"c":21,"l":1732231309188625,"d":213888}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:910:2916]->[1:909:2915] 2024-11-21T23:21:49.193727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:910:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:21:48.844849Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=4750028;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4750028;selected_rows=0; 2024-11-21T23:21:49.193768Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:910:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:21:49.193945Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:910:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.267172s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.321535s;size=0.002211944;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::SPEC;duration=0.029142s;size=0.00128;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};};{name=ASSEMBLER::LAST_PK;duration=0.016093s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2024-11-21T23:21:49.194024Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:910:2916];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:21:49.196311Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2024-11-21T23:21:49.196534Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000011:max} readable: {1000000011:max} at tablet 9437184 2024-11-21T23:21:49.196669Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T23:21:49.196862Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T23:21:49.196942Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T23:21:49.197510Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000011:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:926:2932];trace_detailed=; 2024-11-21T23:21:49.197934Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T23:21:49.198152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:21:49.198336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:49.198508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:49.198752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:21:49.198864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:49.198995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:21:49.199042Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:926:2932] finished for tablet 9437184 2024-11-21T23:21:49.199136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:926:2932] send ScanData to [1:925:2931] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:21:49.199573Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:926:2932] and sent to [1:925:2931] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1732231309197442,"name":"_full_task","f":1732231309197442,"d_finished":0,"c":0,"l":1732231309199188,"d":1746},"events":[{"name":"bootstrap","f":1732231309197630,"d_finished":919,"c":1,"l":1732231309198549,"d":919},{"a":1732231309198728,"name":"ack","f":1732231309198728,"d_finished":0,"c":0,"l":1732231309199188,"d":460},{"a":1732231309198710,"name":"processing","f":1732231309198710,"d_finished":0,"c":0,"l":1732231309199188,"d":478},{"name":"ProduceResults","f":1732231309198248,"d_finished":539,"c":2,"l":1732231309199027,"d":539},{"a":1732231309199029,"name":"Finish","f":1732231309199029,"d_finished":0,"c":0,"l":1732231309199188,"d":159}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:926:2932]->[1:925:2931] 2024-11-21T23:21:49.199680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:21:49.197014Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T23:21:49.199750Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:21:49.199804Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T23:21:49.199912Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:926:2932];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |88.1%| [TA] $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TIncrHugeBlobIdDict::Basic [GOOD] >> TIncrHugeBasicTest::Recovery [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 9807, MsgBus: 11131 2024-11-21T23:21:43.306601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876797803301300:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:43.322651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001ade/r3tmp/tmpRd9eVg/pdisk_1.dat 2024-11-21T23:21:43.919945Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:43.944652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:43.944745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:43.955921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9807, node 1 2024-11-21T23:21:44.111032Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:44.111057Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:44.111064Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:44.111164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11131 TClient is connected to server localhost:11131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:44.824188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:44.871601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
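
The ">> Suite::Test [VERDICT]" markers interleaved with the "|NN.N%|" progress prefixes above are the only per-test verdict signal in this stream. A minimal sketch for tallying them from a saved copy of the log; the ya_stdout.log path and the non-[GOOD] tokens (e.g. [FAIL], [TIMEOUT]) are assumptions, since only [GOOD] appears in this excerpt.

import re
from collections import Counter

# Sketch: tally the ">> Suite::Test [VERDICT]" markers from a saved copy of
# this ya make output. Assumptions: the stream was captured to ya_stdout.log,
# and verdicts other than [GOOD] (e.g. [FAIL], [TIMEOUT]) use the same
# bracketed format -- only [GOOD] is visible in this excerpt.
MARKER = re.compile(r">>\s+(\S+)\s+\[([A-Z]+)\]")

def tally(path="ya_stdout.log"):
    verdicts, suspects = Counter(), []
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for name, verdict in MARKER.findall(line):
                verdicts[verdict] += 1
                if verdict != "GOOD":
                    suspects.append((verdict, name))
    return verdicts, suspects

if __name__ == "__main__":
    verdicts, suspects = tally()
    for verdict, count in verdicts.most_common():
        print(f"{verdict:10s} {count}")
    for verdict, name in suspects:
        print(f"  {verdict}: {name}")

Run against the full capture, this gives a one-screen verdict summary instead of scrolling for the bracketed markers.
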
2024-11-21T23:21:45.100107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:45.393331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:45.533244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:47.948503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876814983172185:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:47.948627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:48.323071Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876797803301300:2056];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:48.323128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:48.336313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:48.420108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:48.488671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:48.552051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:48.593855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:48.648229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:48.763018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876819278139986:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:48.763103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:48.763469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876819278139991:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:48.768190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:48.780650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876819278139993:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:49.833114Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2024-11-21T23:21:49.909357Z, after 0.077295s 2024-11-21T23:21:49.833420Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:21:49.860557Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2024-11-21T23:21:49.956423Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: batching, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732231309 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2024-11-21T23:21:50.005914Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037889 2024-11-21T23:21:50.006239Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037888 2024-11-21T23:21:50.015512Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037889, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2024-11-21T23:21:50.016211Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037888, for tableId 2: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2024-11-21T23:21:50.034201Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037889, for tableId 2 2024-11-21T23:21:50.034251Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037888, for tableId 2 2024-11-21T23:21:50.104424Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7439876827868074936:2463] 2024-11-21T23:21:50.104459Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7439876823573107602:2463] 2024-11-21T23:21:50.105493Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037914 2024-11-21T23:21:50.105618Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710671 at tablet 72075186224037914 2024-11-21T23:21:50.114007Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037914 2024-11-21T23:21:50.122420Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710671 at step 1732231310159 at tablet 72075186224037914 { Transactions { TxId: 281474976710671 AckTo { RawX1: 7439876797803301747 RawX2: 4294969515 } } Step: 1732231310159 MediatorID: 72057594046382081 TabletID: 72075186224037914 } 2024-11-21T23:21:50.122472Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2024-11-21T23:21:50.122637Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037914 2024-11-21T23:21:50.122658Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:21:50.122690Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1732231310159:281474976710671] in PlanQueue unit at 72075186224037914 2024-11-21T23:21:50.122864Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037914 loaded tx from db 1732231310159:281474976710671 keys extracted: 0 2024-11-21T23:21:50.123185Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037914 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:21:50.124159Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037914 step# 1732231310159 txid# 281474976710671} 2024-11-21T23:21:50.124191Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037914 step# 1732231310159} 2024-11-21T23:21:50.124231Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037914 2024-11-21T23:21:50.124289Z node 1 :TX_DATASHARD DEBUG: Complete [1732231310159 : 281474976710671] from 72075186224037914 at tablet 72075186224037914 send result to client [1:7439876827868074938:3654], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:21:50.124314Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2024-11-21T23:21:50.125365Z ... Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2024-11-21T23:21:50.146071Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876827868074954:2471], TxId: 281474976710672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jd8ggagd3h5vwgxj42w0d6ww. SessionId : ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T23:21:50.146203Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2024-11-21T23:21:50.146278Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876827868074955:2472], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=. TraceId : 01jd8ggagd3h5vwgxj42w0d6ww. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:21:50.146298Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876827868074955:2472], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=. TraceId : 01jd8ggagd3h5vwgxj42w0d6ww. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T23:21:50.146320Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:21:50.146326Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T23:21:50.146360Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T23:21:50.146536Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876827868074948:2463] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggagd3h5vwgxj42w0d6ww, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439876827868074954:2471], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 5819 DurationUs: 16000 Tasks { TaskId: 1 CpuTimeUs: 950 FinishTimeMs: 1732231310146 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 114 BuildCpuTimeUs: 836 WaitInputTimeUs: 385 HostName: "ghrun-uc25mmfz34" NodeId: 1 StartTimeMs: 1732231310130 } MaxMemoryUsage: 1048576 } 2024-11-21T23:21:50.146567Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jd8ggagd3h5vwgxj42w0d6ww, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439876827868074954:2471] 2024-11-21T23:21:50.146616Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876827868074948:2463] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggagd3h5vwgxj42w0d6ww, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7439876827868074955:2472], 2024-11-21T23:21:50.146643Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T23:21:50.146789Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876827868074948:2463] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggagd3h5vwgxj42w0d6ww, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7439876823573107602:2463], seqNo: 1, nRows: 1 2024-11-21T23:21:50.147408Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7439876827868074958:2472] 2024-11-21T23:21:50.147453Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876827868074955:2472], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=. TraceId : 01jd8ggagd3h5vwgxj42w0d6ww. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:21:50.147475Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876827868074955:2472], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=. TraceId : 01jd8ggagd3h5vwgxj42w0d6ww. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. 
Consider finished 2024-11-21T23:21:50.147495Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T23:21:50.147504Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2024-11-21T23:21:50.147516Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876827868074955:2472], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=. TraceId : 01jd8ggagd3h5vwgxj42w0d6ww. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T23:21:50.147597Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2024-11-21T23:21:50.147673Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:21:50.147879Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T23:21:50.148057Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876827868074948:2463] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggagd3h5vwgxj42w0d6ww, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439876827868074955:2472], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 12142 DurationUs: 17000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 835 FinishTimeMs: 1732231310147 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 214 BuildCpuTimeUs: 621 WaitInputTimeUs: 14357 HostName: "ghrun-uc25mmfz34" NodeId: 1 StartTimeMs: 1732231310130 } MaxMemoryUsage: 1048576 } 2024-11-21T23:21:50.148090Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jd8ggagd3h5vwgxj42w0d6ww, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439876827868074955:2472] 2024-11-21T23:21:50.148178Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876827868074948:2463] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggagd3h5vwgxj42w0d6ww, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:21:50.148214Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876827868074948:2463] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggagd3h5vwgxj42w0d6ww, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI2YTY4NjMtMWNlNDBiMDctMzM5NTY0NDAtZWU3MTkyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.017961s ReadRows: 3 ReadBytes: 96 ru: 11 rate limiter was not found force flag: 1 2024-11-21T23:21:50.148859Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231310159, txId: 281474976710671] shutting down 2024-11-21T23:21:50.262915Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037890 2024-11-21T23:21:50.263186Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037897 2024-11-21T23:21:50.263667Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037891 2024-11-21T23:21:50.263895Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037890, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2024-11-21T23:21:50.264073Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037890, for tableId 3 2024-11-21T23:21:50.264080Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037897, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2024-11-21T23:21:50.264099Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037897, for tableId 3 2024-11-21T23:21:50.264510Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037891, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2024-11-21T23:21:50.264810Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037891, for tableId 3 2024-11-21T23:21:50.269439Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037892 2024-11-21T23:21:50.269662Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037893 2024-11-21T23:21:50.270315Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037892, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2024-11-21T23:21:50.270808Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037892, for tableId 3 2024-11-21T23:21:50.270919Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037893, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2024-11-21T23:21:50.271130Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037893, for tableId 3 2024-11-21T23:21:50.274707Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037895 2024-11-21T23:21:50.279082Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037895, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 2024-11-21T23:21:50.279368Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037895, for tableId 3 2024-11-21T23:21:50.289622Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037894 2024-11-21T23:21:50.302646Z node 1 :TX_DATASHARD DEBUG: UpdateTableStats at datashard 72075186224037896 2024-11-21T23:21:50.308759Z node 1 :TX_DATASHARD DEBUG: BuildStats result at datashard 72075186224037894, for tableId 3: RowCount 0, DataSize 0, IndexSize 0, PartCount 0, LoadedSize 0, Spent{0.000s wa 0.000s cnt 0}, HistogramKeys 0 
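
The "Scanner finished ... stats:{...}" records earlier in this block embed a JSON profile: phase offsets under "p" and per-stage totals under "events". The "d" / "d_finished" fields appear to be microseconds, since "d":347927 lines up with the "t":0.347 second offsets and the epoch-microsecond "a"/"l" timestamps in the same blob. A sketch, under that assumption and with a hypothetical ya_stdout.log capture, for pulling the blobs out; brace matching is needed because the JSON is immediately followed on the same line by the ";iterator:..." tail.

import json

# Sketch: pull the scanner profile JSON out of "Scanner finished ... stats:{...}"
# records and print per-stage totals. Assumptions: the stream was captured to
# ya_stdout.log, and the d / d_finished fields are microseconds (they line up
# with the fractional-second "t" offsets and the epoch-microsecond a/l fields
# in the blobs above).

def balanced_json(text, start):
    """Return the balanced {...} object that starts at text[start]."""
    depth = 0
    for i in range(start, len(text)):
        if text[i] == "{":
            depth += 1
        elif text[i] == "}":
            depth -= 1
            if depth == 0:
                return text[start:i + 1]
    raise ValueError("unbalanced braces (truncated log?)")

def scanner_profiles(path="ya_stdout.log"):
    with open(path, encoding="utf-8", errors="replace") as fh:
        blob = fh.read()
    pos = 0
    while (pos := blob.find("stats:{", pos)) != -1:
        try:
            raw = balanced_json(blob, pos + len("stats:"))
        except ValueError:
            break
        pos += len("stats:") + len(raw)
        try:
            yield json.loads(raw)
        except json.JSONDecodeError:
            continue  # tolerate records mangled by line wrapping

if __name__ == "__main__":
    for profile in scanner_profiles():
        total_s = profile.get("full", {}).get("d", 0) / 1e6
        print(f"{profile.get('id', '?')}: total {total_s:.3f}s")
        for event in profile.get("events", []):
            print(f"  {event['name']:15s} {event['d_finished'] / 1e6:.3f}s x{event['c']}")

For the two scans above this should print totals of roughly 0.348s and 0.002s, matching the full-scan and the empty-scan profiles visible in the log.
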
2024-11-21T23:21:50.309632Z node 1 :TX_DATASHARD DEBUG: BuildStats result received at datashard 72075186224037894, for tableId 3 |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Recovery [GOOD] |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBlobIdDict::Basic [GOOD] >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] >> TIncrHugeBasicTest::Defrag |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7146, MsgBus: 22105 2024-11-21T23:19:51.925531Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876315473599628:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:51.925660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000f5c/r3tmp/tmplaT4TN/pdisk_1.dat 2024-11-21T23:19:52.321207Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:52.350518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:52.350620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:52.352096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7146, node 1 2024-11-21T23:19:52.532335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:52.532359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:52.532368Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:52.532496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22105 TClient is connected to server localhost:22105 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:19:53.213747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.247271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.408092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.578077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:53.661093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:19:55.690608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876332653470326:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:55.690733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.119955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.200612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.236745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.311386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.420943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.522159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:19:56.638555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876336948438136:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.638681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.639015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876336948438141:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:56.642935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:19:56.655620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:19:56.656071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876336948438143:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:19:56.935697Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876315473599628:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:56.935881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:19:58.004493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.076309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.116518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.154883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.188132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.363856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.440132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.516309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.565472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.601745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.640898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.729278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T23:19:58.781521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.360046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T23:19:59.425198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.470540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.507024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.551235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T23:19:59.593718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ... access permissions } 2024-11-21T23:21:39.406987Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:39.427840Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876779847686284:2436], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:41.293987Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.410959Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.505977Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.612350Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.685240Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.927172Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:21:41.986850Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.080652Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.174833Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.231786Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.304149Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.377695Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.446429Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.273038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T23:21:43.367643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.465648Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.540328Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.611946Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.671460Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.742802Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.824230Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.881971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2024-11-21T23:21:43.965635Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.049604Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.120427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.173313Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.251436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.307906Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.386333Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.459634Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.521127Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.616095Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.705922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.843141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.957858Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.038018Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.096779Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.532936Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:1, at schemeshard: 72057594046644480 2024-11-21T23:21:45.637572Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.724675Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.782954Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.855650Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2024-11-21T23:21:45.961229Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2024-11-21T23:21:46.063894Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2024-11-21T23:21:46.158190Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2024-11-21T23:21:46.254928Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2024-11-21T23:21:46.514091Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 
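
Most of the stderr in this block is a single record, "FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ...", repeated once per created table. A sketch for collapsing such repeats when skimming a capture of this log; the ya_stdout.log path is an assumption, and digits (timestamps, txIds, tablet ids) are masked so otherwise-identical messages fold together.

import re
from collections import Counter

# Sketch: collapse repeated WARN/ERROR records so walls of identical
# schemeshard warnings show up as a single counted line. Assumptions: the
# stream was captured to ya_stdout.log; digits (timestamps, txIds, tablet and
# actor ids) are masked with "#" so otherwise-identical messages fold together.
RECORD = re.compile(
    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :(\S+) (WARN|ERROR): "
    r"(.*?)(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z|\Z)",
    re.S,
)

def top_messages(path="ya_stdout.log", limit=10):
    with open(path, encoding="utf-8", errors="replace") as fh:
        blob = fh.read()
    counts = Counter()
    for component, level, message in RECORD.findall(blob):
        normalized = re.sub(r"\d+", "#", " ".join(message.split()))
        counts[(component, level, normalized[:120])] += 1
    return counts.most_common(limit)

if __name__ == "__main__":
    for (component, level, message), count in top_messages():
        print(f"{count:6d}  {component} {level}: {message}")

On this excerpt the schemeshard warning and the "Resource pool default not found" warning dominate the counts; both occur in tests that still report [GOOD], so here they read as setup noise rather than a failure signal.
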
2024-11-21T23:21:46.585896Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2024-11-21T23:21:46.656600Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2024-11-21T23:21:46.745776Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 2024-11-21T23:21:47.686649Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:21:47.686684Z node 5 :IMPORT WARN: Table profiles were not loaded |88.2%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> KqpJoinOrder::TPCDS9_SMALL-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS9_SMALL-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TPCDS92-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92+StreamLookupJoin-ColumnStore |88.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/client/ydb_federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::FiveWayJoinWithComplexPreds+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-StreamLookupJoin+ColumnStore >> KqpJoinOrder::TestJoinHint2+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinHint2-StreamLookupJoin+ColumnStore >> KqpCost::RangeFullScan [GOOD] >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::RangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 30144, MsgBus: 25817 2024-11-21T23:21:47.859149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876813774211237:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:47.859189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001ab0/r3tmp/tmpn67eGV/pdisk_1.dat 2024-11-21T23:21:48.526803Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:48.543566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:48.543704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:48.550129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30144, node 1 2024-11-21T23:21:48.744737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:48.744769Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:48.744778Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:48.744890Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:25817 TClient is connected to server localhost:25817 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:49.704211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:49.735602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:49.998240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:50.298787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:50.409451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:52.792020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876835249049419:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:52.792134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:52.862519Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876813774211237:2051];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:52.862589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:53.248965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.294232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.328672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.372484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.407723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.482128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.599216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876839544017225:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:53.599281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:53.599466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876839544017230:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:53.604262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:53.633873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876839544017232:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } query_phases { duration_us: 4909 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 3616 affected_shards: 1 } compilation { duration_us: 235126 cpu_time_us: 228270 } process_cpu_time_us: 216 total_duration_us: 244093 total_cpu_time_us: 232102 |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |88.2%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] Test command err: Trying to start YDB, gRPC: 4182, MsgBus: 30208 2024-11-21T23:21:03.619745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876627308288195:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:03.619796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000df8/r3tmp/tmpvYzprC/pdisk_1.dat 2024-11-21T23:21:04.415499Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:04.456798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:04.456973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:04.471986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4182, node 1 2024-11-21T23:21:04.721686Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:04.721706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:04.721713Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:04.721800Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30208 TClient is connected to server localhost:30208 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:06.490077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:21:06.523269Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:21:06.537237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:06.962155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:07.391085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:07.529812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:08.622546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876627308288195:2052];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:08.622623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:12.363364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876665962995588:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:12.363470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:13.023596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:13.075208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:13.126114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:13.170523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:13.206172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:13.287047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:13.372844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876670257963405:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:13.372897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:13.373458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876670257963410:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:13.377107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:13.395113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876670257963412:2443], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:15.267387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:15.339385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:15.387867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12876, MsgBus: 61500 2024-11-21T23:21:17.708251Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876685035075848:2184];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:17.708315Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000df8/r3tmp/tmpRNBJkL/pdisk_1.dat 2024-11-21T23:21:18.115275Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:18.167818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:18.167908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:18.172311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12876, node 2 2024-11-21T23:21:18.347085Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:18.347111Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:18.347119Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:18.347232Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61500 TClient is connected to server localhost:61500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:19.165161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:21:19.205475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:21:19.317220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2024-11-21T23:21:19.830551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part propo ... main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:41.954680Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:42.024307Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.088397Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.141114Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.183185Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.278054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.439837Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:42.524065Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876792040014880:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:42.524176Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:42.524454Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7439876792040014885:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:42.531500Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:42.545461Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7439876792040014887:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:21:44.459160Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.523057Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2024-11-21T23:21:44.572678Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12333, MsgBus: 61504 2024-11-21T23:21:47.964623Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876815505072149:2190];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000df8/r3tmp/tmpS0q5aT/pdisk_1.dat 2024-11-21T23:21:48.047173Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:21:48.096071Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:48.166359Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:48.168190Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:48.172783Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12333, node 5 2024-11-21T23:21:48.279110Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:48.279138Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:48.279149Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:48.279284Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61504 TClient is connected to server localhost:61504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:49.128801Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:21:49.142658Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2024-11-21T23:21:49.161696Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:49.280348Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:49.523288Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:49.635338Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:52.954176Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876815505072149:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:52.954231Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:53.271766Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876841274877483:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:53.271889Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:53.345780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.441624Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.494505Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.550868Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.621447Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.716910Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:53.842471Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876841274877989:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:53.842577Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:53.842843Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876841274877994:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:53.848551Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:53.860328Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876841274877996:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2024-11-21T23:21:55.277988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2024-11-21T23:21:55.333607Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/benchmark |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/benchmark |88.2%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/benchmark >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/meta/bin/mvp_meta |88.2%| [LD] {RESULT} $(B)/ydb/mvp/meta/bin/mvp_meta |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/meta/bin/mvp_meta |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/pgwire/pgwire |88.3%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 1983, MsgBus: 26019 2024-11-21T23:21:52.434626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876836166640011:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:52.434683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001a33/r3tmp/tmpH0ijf9/pdisk_1.dat 2024-11-21T23:21:52.982130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:21:52.982224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:21:52.988481Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:21:52.992184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1983, node 1 2024-11-21T23:21:53.129852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:21:53.129877Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:21:53.129886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:21:53.130757Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26019 TClient is connected to server localhost:26019 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:21:53.898110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:53.924483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:54.065096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:21:54.247217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:21:54.339166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:21:56.389680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876853346510717:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:56.394603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:56.654022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:21:56.689100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:21:56.782606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:21:56.850847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:21:56.892136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:21:56.938194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:21:57.002846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876857641478515:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:57.002926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:57.004554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876857641478520:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:21:57.008502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:21:57.019365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876857641478522:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:21:57.431991Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876836166640011:2255];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:21:57.432072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:21:58.203521Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2024-11-21T23:21:58.523344Z, after 0.322608s 2024-11-21T23:21:58.207820Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2024-11-21T23:21:58.231722Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2024-11-21T23:21:58.527936Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: batching, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1732231318 AvailableComputeActors: 10000 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 10000 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2024-11-21T23:21:58.574440Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7439876861936446194:2460] 2024-11-21T23:21:58.574474Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7439876861936446136:2460] 2024-11-21T23:21:58.575600Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037914 2024-11-21T23:21:58.575760Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710671 at tablet 72075186224037914 2024-11-21T23:21:58.577058Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037914 2024-11-21T23:21:58.584421Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710671 at step 1732231318622 at tablet 72075186224037914 { Transactions { TxId: 281474976710671 AckTo { RawX1: 7439876836166640291 RawX2: 4294969524 } } Step: 1732231318622 MediatorID: 72057594046382081 TabletID: 72075186224037914 } 2024-11-21T23:21:58.584457Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2024-11-21T23:21:58.584582Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037914 2024-11-21T23:21:58.584599Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 active 0 active planned 0 immediate 0 planned 1 2024-11-21T23:21:58.584626Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1732231318622:281474976710671] in PlanQueue unit at 72075186224037914 2024-11-21T23:21:58.584980Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037914 loaded tx from db 1732231318622:281474976710671 keys extracted: 0 2024-11-21T23:21:58.585548Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037914 (dry run) active 0 active planned 0 immediate 0 planned 0 2024-11-21T23:21:58.586323Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 72075186224037914 step# 1732231318622 txid# 281474976710671} 2024-11-21T23:21:58.586366Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037914 step# 1732231318622} 2024-11-21T23:21:58.586421Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037914 2024-11-21T23:21:58.586467Z node 1 
:TX_DATASHARD DEBUG: Complete [1732231318622 : 281474976710671] from 72075186224037914 at tablet 72075186224037914 send result to client [1:7439876861936446196:3679], exec latency: 0 ms, propose latency: 1 ms 2024-11-21T23:21:58.586488Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037914 2024-11-21T23:21:58.587408Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1732231318622:281474976710671 created 2024-11-21T23:21:58.587884Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876861936446205:2460] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2024-11-21T23:21:58.587952Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2024-11-21T23:21:58.587966Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2024-11-21T23:21:58.588238Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2024-11-21T23:21:58.588409Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [ ... turned async data processed rows 3 left freeSpace 8388548 received rows 3 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2024-11-21T23:21:58.628047Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446209:2470], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CustomerSuppliedId : . TraceId : 01jd8ggjp17amxm4f40try12fc. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:21:58.628084Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446209:2470], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CustomerSuppliedId : . TraceId : 01jd8ggjp17amxm4f40try12fc. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T23:21:58.628136Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2024-11-21T23:21:58.628156Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446210:2471], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd8ggjp17amxm4f40try12fc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646923 2024-11-21T23:21:58.628186Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Finish input channelId: 1, from: [1:7439876861936446209:2470] 2024-11-21T23:21:58.628228Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446210:2471], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd8ggjp17amxm4f40try12fc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:21:58.629163Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446210:2471], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd8ggjp17amxm4f40try12fc. CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:21:58.629184Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446210:2471], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd8ggjp17amxm4f40try12fc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T23:21:58.629218Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T23:21:58.629232Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [1] 2024-11-21T23:21:58.629247Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446209:2470], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CustomerSuppliedId : . TraceId : 01jd8ggjp17amxm4f40try12fc. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646927 2024-11-21T23:21:58.629280Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446209:2470], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CustomerSuppliedId : . TraceId : 01jd8ggjp17amxm4f40try12fc. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2024-11-21T23:21:58.629290Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446209:2470], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CustomerSuppliedId : . TraceId : 01jd8ggjp17amxm4f40try12fc. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. All outputs have been finished. Consider finished 2024-11-21T23:21:58.629300Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2024-11-21T23:21:58.629319Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446209:2470], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CustomerSuppliedId : . TraceId : 01jd8ggjp17amxm4f40try12fc. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. All channels and sinks finished 2024-11-21T23:21:58.629450Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2024-11-21T23:21:58.629582Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:21:58.629780Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T23:21:58.630116Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876861936446205:2460] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7439876861936446136:2460], seqNo: 1, nRows: 1 2024-11-21T23:21:58.630346Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876861936446205:2460] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439876861936446209:2470], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 30320 DurationUs: 39000 Tasks { TaskId: 1 CpuTimeUs: 5841 FinishTimeMs: 1732231318629 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 4737 BuildCpuTimeUs: 1104 WaitInputTimeUs: 6676 HostName: "ghrun-uc25mmfz34" NodeId: 1 StartTimeMs: 1732231318590 } MaxMemoryUsage: 1048576 } 2024-11-21T23:21:58.630379Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439876861936446209:2470] 2024-11-21T23:21:58.630436Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876861936446205:2460] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7439876861936446210:2471], 2024-11-21T23:21:58.630971Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7439876861936446212:2471] 2024-11-21T23:21:58.631016Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446210:2471], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd8ggjp17amxm4f40try12fc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2024-11-21T23:21:58.631035Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446210:2471], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd8ggjp17amxm4f40try12fc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. All outputs have been finished. Consider finished 2024-11-21T23:21:58.631055Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2024-11-21T23:21:58.631062Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. 
Tasks execution finished 2024-11-21T23:21:58.631072Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7439876861936446210:2471], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jd8ggjp17amxm4f40try12fc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2024-11-21T23:21:58.631130Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2024-11-21T23:21:58.631185Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:67;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2024-11-21T23:21:58.631323Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 1048576, Free Tier: 0, ExecutionUnits: 1. 2024-11-21T23:21:58.631471Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876861936446205:2460] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7439876861936446210:2471], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 9978 DurationUs: 24000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 1552 FinishTimeMs: 1732231318631 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 880 BuildCpuTimeUs: 672 WaitInputTimeUs: 12637 HostName: "ghrun-uc25mmfz34" NodeId: 1 StartTimeMs: 1732231318607 } MaxMemoryUsage: 1048576 } 2024-11-21T23:21:58.631492Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7439876861936446210:2471] 2024-11-21T23:21:58.631584Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876861936446205:2460] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2024-11-21T23:21:58.631626Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7439876861936446205:2460] TxId: 281474976710672. Ctx: { TraceId: 01jd8ggjp17amxm4f40try12fc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkzYjM0NjYtYmE5OTlhNTctZTdmMGQ2NzUtZDRhOTYyMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.040298s ReadRows: 1 ReadBytes: 20 ru: 26 rate limiter was not found force flag: 1 2024-11-21T23:21:58.632313Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1732231318622, txId: 281474976710671] shutting down |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::DatetimeConstantFold-StreamLookupJoin+ColumnStore |88.3%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCC |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |88.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore >> TKqpScanData::UnboxedValueSize |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns >> TKqpScanData::FailOnUnsupportedPgType >> TKqpScanData::UnboxedValueSize [GOOD] >> TKqpScanData::EmptyColumns [GOOD] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/local_ydb/local_ydb |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/local_ydb/local_ydb |88.4%| [LD] {RESULT} $(B)/ydb/public/tools/local_ydb/local_ydb >> KqpJoinOrder::CanonizedJoinOrderTPCDS64-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin-ColumnStore |88.4%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |88.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.4%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> KqpJoinOrder::FiveWayJoinWithPreds-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+StreamLookupJoin-ColumnStore |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |88.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |88.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut >> KqpJoinOrder::TestJoinOrderHintsComplex-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCH21+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCH21-StreamLookupJoin+ColumnStore |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |88.5%| [LD] {RESULT} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb >> KqpJoinOrder::TPCDS34-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS61-StreamLookupJoin-ColumnStore >> TKqpScanData::DifferentNumberOfInputAndResultColumns |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> KqpJoinOrder::TestJoinOrderHintsSimple-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin-ColumnStore >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> KqpJoinOrder::TPCDS87-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS88+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS94-StreamLookupJoin+ColumnStore [GOOD] >> KqpJoinOrder::TPCDS95+StreamLookupJoin-ColumnStore |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] |88.5%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tests/tpch/tpch |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch |88.5%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch >> TAccessServiceTest::PassRequestId >> FolderServiceTest::TFolderServiceTransitional |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> FolderServiceTest::TFolderServiceAdapter >> FolderServiceTest::TFolderService |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |88.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut >> TSubDomainTest::Boot >> TServiceAccountServiceTest::Get >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> TServiceAccountServiceTest::Get [GOOD] >> TSubDomainTest::LsLs >> TSubDomainTest::StartAndStopTenanNode >> TSubDomainTest::CreateDummyTabletsInDifferentDomains ------- [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__free_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__memalign_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__realloc_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'nanosleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'on_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open' 
failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrier_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_broadcast' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_cond_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_timedlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_once' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_rdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_timedwrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_tryrdlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_trywrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlock_wrlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_init' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_trylock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_spin_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'raise' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__res_iclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rmdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_setjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signalfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__sigsetjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigsuspend' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socket' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpfile64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'unlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'usleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bcopy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dladdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'forkpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat64' failed: symbol not defined ld.lld: 
warning: version script assignment of 'global' to symbol 'gcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrusage' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbrtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memccpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'openpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_key_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'putenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'stpcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_l' failed: symbol not defined ld.lld: warning: 
version script assignment of 'global' to symbol 'strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'swprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vswprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcschr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_internal' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemset' failed: symbol not defined |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/yqlrun/yqlrun |88.5%| [LD] {RESULT} $(B)/ydb/library/yql/tools/yqlrun/yqlrun >> TSubDomainTest::CreateTablet >> TSubDomainTest::CreateTabletForUnknownDomain |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |88.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |88.6%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics >> TAccessServiceTest::PassRequestId [GOOD] >> FolderServiceTest::TFolderService [GOOD] >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> FolderServiceTest::TFolderServiceAdapter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2024-11-21T23:22:12.598777Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876920656862712:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:22:12.624381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019f9/r3tmp/tmpKoXkZA/pdisk_1.dat 2024-11-21T23:22:13.291495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:13.291620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:13.291831Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:13.293639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:13.769828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:22:13.785684Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:22:13.833736Z node 1 :GRPC_CLIENT DEBUG: [5160000a0ed0]{trololo} Connect to grpc://localhost:11803 2024-11-21T23:22:13.853386Z node 1 :GRPC_CLIENT DEBUG: [5160000a0ed0]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2024-11-21T23:22:13.932394Z node 1 :GRPC_CLIENT DEBUG: [5160000a0ed0]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TServiceAccountServiceTest::IssueToken >> TServiceAccountServiceTest::IssueToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2024-11-21T23:22:13.389012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876927009064535:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:22:13.394383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019d1/r3tmp/tmp0n2idb/pdisk_1.dat 2024-11-21T23:22:13.934127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:13.934238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:13.941059Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:13.944541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5882 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:14.581303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:22:14.620834Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:22:14.626564Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Connect to grpc://localhost:12927 2024-11-21T23:22:14.638959Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T23:22:14.799235Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12927: Failed to connect to remote host: Connection refused 2024-11-21T23:22:14.806650Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T23:22:14.807372Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12927: Failed to connect to remote host: Connection refused 2024-11-21T23:22:15.810626Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T23:22:15.927313Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Status 5 Not Found 2024-11-21T23:22:15.928049Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2024-11-21T23:22:15.939523Z node 1 :GRPC_CLIENT DEBUG: [51600007f8d0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] >> KqpExplain::AggGroupLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2024-11-21T23:22:13.171039Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876928297344094:2190];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:22:13.171240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019c0/r3tmp/tmp3tjeou/pdisk_1.dat 2024-11-21T23:22:13.807058Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:13.815122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:13.815206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:13.817501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7618 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:14.392633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:22:14.435708Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:22:14.564222Z node 1 :GRPC_CLIENT DEBUG: [51600002e5d0] Connect to grpc://localhost:63082 2024-11-21T23:22:14.565160Z node 1 :GRPC_CLIENT DEBUG: [51600002e5d0] Request ListFoldersRequest { id: "i_am_exists" } 2024-11-21T23:22:14.689744Z node 1 :GRPC_CLIENT DEBUG: [51600002e5d0] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2024-11-21T23:22:14.691001Z node 1 :GRPC_CLIENT DEBUG: [51600005dcd0] Connect to grpc://localhost:65033 2024-11-21T23:22:14.691714Z node 1 :GRPC_CLIENT DEBUG: [51600005dcd0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2024-11-21T23:22:14.700446Z node 1 :GRPC_CLIENT DEBUG: [51600005dcd0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2024-11-21T23:22:14.701075Z node 1 :GRPC_CLIENT DEBUG: [51600005dcd0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2024-11-21T23:22:14.707011Z node 1 :GRPC_CLIENT DEBUG: [51600005dcd0] Status 5 Not Found 2024-11-21T23:22:14.711848Z node 1 :GRPC_CLIENT DEBUG: [51600002e5d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T23:22:14.716235Z node 1 :GRPC_CLIENT DEBUG: [51600002e5d0] Status 5 Not Found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231870.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132231870.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231870.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112231870.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230670.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112230670.000000s;Name=;Codec=}; 2024-11-21T23:21:14.136048Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:21:14.242754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:21:14.267612Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:21:14.267729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:21:14.268007Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:21:14.277268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:21:14.277535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:21:14.277816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:21:14.277953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:21:14.278065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:21:14.278189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:21:14.278382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:21:14.278561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:21:14.278701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:21:14.278837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:21:14.278949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:21:14.279062Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:21:14.305064Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:21:14.305168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:21:14.309444Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:21:14.309688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:21:14.309767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:21:14.309979Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:14.310141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:21:14.310219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:21:14.310262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:21:14.310372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:21:14.310462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:21:14.310518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:21:14.310583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:21:14.310797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:14.310870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:21:14.310924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:21:14.310961Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:21:14.311064Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:21:14.311128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:21:14.311175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:21:14.311217Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:21:14.311294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:21:14.311336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:21:14.311368Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:21:14.311418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:21:14.311470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:21:14.311655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:21:14.311885Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=65; 2024-11-21T23:21:14.311969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2024-11-21T23:21:14.312057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2024-11-21T23:21:14.312169Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=58; 2024-11-21T23:21:14.312354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:21:14.312444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:21:14.312488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:21:14.312757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2024-11-21T23:21:14.312812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:21:14.312849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:21:14.313063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... =(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:22:18.649058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] finished for tablet 9437184 2024-11-21T23:22:18.649154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:686:2689] send ScanData to [1:685:2688] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:22:18.649634Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:686:2689] and sent to [1:685:2688] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 stats:{"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.011},{"events":["f_ack"],"t":0.301},{"events":["l_task_result"],"t":0.512},{"events":["l_ProduceResults","f_Finish"],"t":0.52},{"events":["l_ack","l_processing","l_Finish"],"t":0.521}],"full":{"a":1732231338128117,"name":"_full_task","f":1732231338128117,"d_finished":0,"c":0,"l":1732231338649197,"d":521080},"events":[{"name":"bootstrap","f":1732231338128306,"d_finished":5726,"c":1,"l":1732231338134032,"d":5726},{"a":1732231338642359,"name":"ack","f":1732231338429464,"d_finished":1937,"c":3,"l":1732231338642265,"d":8775},{"a":1732231338642348,"name":"processing","f":1732231338139998,"d_finished":343749,"c":24,"l":1732231338642267,"d":350598},{"name":"ProduceResults","f":1732231338130378,"d_finished":7846,"c":29,"l":1732231338649039,"d":7846},{"a":1732231338649043,"name":"Finish","f":1732231338649043,"d_finished":0,"c":0,"l":1732231338649197,"d":154},{"name":"task_result","f":1732231338140023,"d_finished":341279,"c":21,"l":1732231338640214,"d":341279}],"id":"9437184::7"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:686:2689]->[1:685:2688] 2024-11-21T23:22:18.649740Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:22:18.127667Z;index_granules=0;index_portions=3;index_batches=6;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4749668;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4749668;selected_rows=0; 2024-11-21T23:22:18.649780Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:22:18.649945Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile={branch:simple;limit:0;duration:0.323107s;steps_10Ms:[{name=FETCHING_COLUMNS;duration=0.471204s;size=0.002211992;details={columns=1,2,3,4,4294967040,4294967041,4294967042;};};{name=ASSEMBLER::LAST_PK;duration=0.045475s;size=0.003193335;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};};]};; 2024-11-21T23:22:18.650025Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:686:2689];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:22:18.652069Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2024-11-21T23:22:18.652303Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2024-11-21T23:22:18.652444Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2024-11-21T23:22:18.652638Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:486;event=parse_program;program=Command { Projection { Columns { Name: "timestamp" } } } ; 2024-11-21T23:22:18.652723Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:499;event=program_parsed;result=[{projections=[timestamp;];};]; 2024-11-21T23:22:18.653275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:136:2168];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:131;event=TTxScan started;actor_id=[1:702:2705];trace_detailed=; 2024-11-21T23:22:18.653687Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:355;ff_first=(column_ids=1;column_names=timestamp;);; 2024-11-21T23:22:18.653899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.cpp:369;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2024-11-21T23:22:18.654076Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce 
result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:22:18.654233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:22:18.660879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=actor.cpp:133;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2024-11-21T23:22:18.661054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:192;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:22:18.661197Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;method=produce result;fline=actor.cpp:197;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2024-11-21T23:22:18.661256Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] finished for tablet 9437184 2024-11-21T23:22:18.661366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:702:2705] send ScanData to [1:701:2704] txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0 rows: 0 page faults: 0 finished: 1 pageFault: 0 arrow schema: 2024-11-21T23:22:18.661838Z node 1 :TX_COLUMNSHARD_SCAN INFO: actor.cpp:375 :Scanner finished [1:702:2705] and sent to [1:701:2704] packs: 0 txId: 18446744073709551615 scanId: 0 gen: 0 tablet: 9437184 bytes: 0/0 rows: 0/0 page faults: 0 finished: 1 pageFault: 0 
stats:{"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap"],"t":0.001},{"events":["f_ack","f_processing"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":1732231338653201,"name":"_full_task","f":1732231338653201,"d_finished":0,"c":0,"l":1732231338661417,"d":8216},"events":[{"name":"bootstrap","f":1732231338653400,"d_finished":867,"c":1,"l":1732231338654267,"d":867},{"a":1732231338660833,"name":"ack","f":1732231338660833,"d_finished":0,"c":0,"l":1732231338661417,"d":584},{"a":1732231338660774,"name":"processing","f":1732231338660774,"d_finished":0,"c":0,"l":1732231338661417,"d":643},{"name":"ProduceResults","f":1732231338654001,"d_finished":547,"c":2,"l":1732231338661230,"d":547},{"a":1732231338661234,"name":"Finish","f":1732231338661234,"d_finished":0,"c":0,"l":1732231338661417,"d":183}],"id":"9437184::8"};iterator:ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;) Got TEvKqpCompute::TEvScanData [1:702:2705]->[1:701:2704] 2024-11-21T23:22:18.661950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=stats.cpp:8;event=statistic;begin=2024-11-21T23:22:18.652791Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2024-11-21T23:22:18.662002Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=read_context.h:166;event=scan_aborted;reason=unexpected on destructor; 2024-11-21T23:22:18.662054Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:72;profile=; 2024-11-21T23:22:18.662160Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:702:2705];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;fline=context.h:73;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2024-11-21T23:22:12.687319Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876924268193461:2130];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:22:12.698911Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019ce/r3tmp/tmpG0mUNm/pdisk_1.dat 2024-11-21T23:22:13.304391Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:13.313721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:13.313868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:13.316468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:13.800546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:22:13.820904Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Connect to grpc://localhost:19227 2024-11-21T23:22:13.956362Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T23:22:14.045431Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19227: Failed to connect to remote host: Connection refused 2024-11-21T23:22:14.050925Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T23:22:14.051787Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:19227: Failed to connect to remote host: Connection refused 2024-11-21T23:22:15.052481Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Request ListFoldersRequest { id: "i_am_not_exists" } 2024-11-21T23:22:15.057295Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Status 5 Not Found 2024-11-21T23:22:15.058724Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Request ListFoldersRequest { id: "i_am_exists" } 2024-11-21T23:22:15.065700Z node 1 :GRPC_CLIENT DEBUG: [5160000930d0] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } >> TSubDomainTest::Boot [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> TSubDomainTest::LsLs [GOOD] >> TSubDomainTest::LsAltered ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231885.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231885.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231885.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231885.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231885.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112231885.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=132231885.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231885.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112230685.000000s;Name=;Codec=}; 2024-11-21T23:21:25.780227Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:21:25.896744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:21:25.916536Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:21:25.916612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:21:25.916830Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:21:25.924091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:21:25.924297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:21:25.924515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:21:25.924621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:21:25.924715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:21:25.924812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:21:25.924904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:21:25.925005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:21:25.925116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:21:25.925213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:21:25.925309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:21:25.925403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:21:25.945971Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:21:25.946047Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:21:25.956433Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:21:25.956724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:21:25.956781Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:21:25.956956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:25.957128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:21:25.957218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:21:25.957269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:21:25.957353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:21:25.957416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:21:25.957453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:21:25.957509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:21:25.957669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:25.957725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:21:25.957765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:21:25.957794Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:21:25.957873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:21:25.957921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:21:25.957958Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:21:25.957988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:21:25.958049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:21:25.958083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:21:25.958109Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:21:25.958153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:21:25.958193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:21:25.958223Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:21:25.958384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2024-11-21T23:21:25.958484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2024-11-21T23:21:25.958564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2024-11-21T23:21:25.958641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2024-11-21T23:21:25.958790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:21:25.958868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:21:25.958909Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:21:25.959090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abst ... 
ollerLoadingTime=82; 2024-11-21T23:22:19.821083Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:22:19.821126Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=106; 2024-11-21T23:22:19.821214Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=52; 2024-11-21T23:22:19.821292Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=38; 2024-11-21T23:22:19.821542Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=206; 2024-11-21T23:22:19.822185Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=593; 2024-11-21T23:22:19.822280Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=40; 2024-11-21T23:22:19.822383Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=64; 2024-11-21T23:22:19.822530Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2024-11-21T23:22:19.822594Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2024-11-21T23:22:19.822633Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2024-11-21T23:22:19.822713Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2024-11-21T23:22:19.822767Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=15; 2024-11-21T23:22:19.822885Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=69; 2024-11-21T23:22:19.822935Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2024-11-21T23:22:19.823014Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2024-11-21T23:22:19.823064Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15401; 2024-11-21T23:22:19.823231Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=15;blobs=30;rows=400000;bytes=23741012;raw_bytes=40211545; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:22:19.823353Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:22:19.823398Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1558;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T23:22:19.823452Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:22:19.823595Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T23:22:19.823714Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T23:22:19.823847Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:22:19.823899Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:22:19.823989Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:22:19.824038Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:22:19.824101Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:19.824182Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T23:22:19.824246Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:22:19.824287Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:22:19.824338Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:19.824428Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:19.824489Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:22:19.824597Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:22:19.825206Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:19.825361Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1486:3384];tablet_id=9437184;parent=[1:1437:3342];fline=manager.h:99;event=ask_data;request=request_id=109;1={portions_count=15};; 2024-11-21T23:22:19.826026Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:22:19.826334Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:22:19.826375Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T23:22:19.826428Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:22:19.826474Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:22:19.826539Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:19.826600Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T23:22:19.826656Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:22:19.826692Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:22:19.826761Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:19.826828Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:19.826879Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:22:19.826967Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:22:19.828655Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=15;path_id=1; 2024-11-21T23:22:19.829689Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=15;path_id=1; 2024-11-21T23:22:19.831087Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:22:19.831146Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1437:3342];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::AggGroupLimit [GOOD] Test command err: 2024-11-21T23:19:53.431308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876325429592991:2053];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:53.431344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002848/r3tmp/tmp8Z6ESC/pdisk_1.dat 2024-11-21T23:19:53.884970Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:53.929607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:53.929743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22605, node 1 2024-11-21T23:19:53.967473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:54.124063Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:19:54.124096Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:19:54.124108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:19:54.124205Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2024-11-21T23:19:54.233595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:30101 
2024-11-21T23:19:54.579688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2024-11-21T23:19:54.703539Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7439876327507799709:2200];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:19:54.728586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:54.728659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:54.745244Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2024-11-21T23:19:54.747184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:54.839841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:19:55.139756Z node 2 :STATISTICS INFO: [72075186224037897] OnActivateExecutor 2024-11-21T23:19:55.139843Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Execute 2024-11-21T23:19:55.144469Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:19:55.158371Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInitSchema::Complete 2024-11-21T23:19:55.158433Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Execute 2024-11-21T23:19:55.158650Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded BaseStatistics: schemeshard count# 0 2024-11-21T23:19:55.158699Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ColumnStatistics: column count# 0 2024-11-21T23:19:55.158735Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ScheduleTraversals: table count# 0 2024-11-21T23:19:55.158757Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalOperations: table count# 0 2024-11-21T23:19:55.158783Z node 2 :STATISTICS DEBUG: [72075186224037897] Loaded ForceTraversalTables: table count# 0 2024-11-21T23:19:55.158818Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxInit::Complete 2024-11-21T23:19:55.159024Z node 2 :STATISTICS INFO: [72075186224037897] Subscribed for config changes 2024-11-21T23:19:55.168834Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7439876331802767311:2226] 2024-11-21T23:19:55.168883Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Execute: database# /Root/Database 2024-11-21T23:19:55.200549Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:19:55.200608Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7439876331802767418:2284], at schemeshard: 72075186224037889, StatisticsAggregatorId: 72075186224037897, at schemeshard: 72075186224037889 2024-11-21T23:19:55.201991Z node 2 :STATISTICS DEBUG: [72075186224037897] TTxConfigure::Complete 2024-11-21T23:19:55.203837Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2024-11-21T23:19:55.203859Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2024-11-21T23:19:55.203929Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
Full table path:/Root/Database/.metadata/_statistics 2024-11-21T23:19:55.213390Z node 2 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [2:7439876331802767490:2344] 2024-11-21T23:19:55.213775Z node 2 :STATISTICS DEBUG: [72075186224037897] EvConnectSchemeShard, pipe server id = [2:7439876331802767490:2344], schemeshard id = 72075186224037889 2024-11-21T23:19:55.232663Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:19:55.232737Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:19:55.236236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037889 2024-11-21T23:19:55.247399Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037889 PathId: 3 } 2024-11-21T23:19:55.247451Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2024-11-21T23:19:55.247631Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2024-11-21T23:19:55.249432Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2024-11-21T23:19:55.450777Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2024-11-21T23:19:55.550183Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2024-11-21T23:19:57.775932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876342609463384:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:57.776021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:19:58.004470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710659:0, at schemeshard: 72075186224037889 2024-11-21T23:19:58.203240Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:58.203466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:58.203778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:58.203907Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:58.204013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:19:58.204143Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:19:58.204255Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:19:58.204399Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:19:58.204551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:19:58.204663Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:19:58.204778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:19:58.204885Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:7439876344687669768:2326];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2024-11-21T23:19:58.241219Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439876344687669767:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:19:58.241287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439876344687669767:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:19:58.241512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439876344687669767:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:19:58.241612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[2:7439876344687669767:2325];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:19:58.241709Z node ... STICS DEBUG: SyncNode(), pipe client id = [3:7439876900683381918:2846] 2024-11-21T23:22:07.718369Z node 4 :STATISTICS DEBUG: [72075186224037897] EvServerConnected, pipe server id = [4:7439876902546877836:2611] 2024-11-21T23:22:07.718356Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7439876900683381918:2846], server id = [4:7439876902546877836:2611], tablet id = 72075186224037897, status = OK 2024-11-21T23:22:07.718712Z node 4 :STATISTICS DEBUG: [72075186224037897] EvConnectNode, pipe server id = [4:7439876902546877836:2611], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2024-11-21T23:22:07.718749Z node 4 :STATISTICS DEBUG: [72075186224037897] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2024-11-21T23:22:07.719005Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2024-11-21T23:22:07.719039Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7439876900683381916:2311], StatRequests.size() = 1 2024-11-21T23:22:07.756114Z node 3 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2024-11-21T23:22:07.757187Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2024-11-21T23:22:07.758747Z node 3 :STATISTICS DEBUG: EvClientDestroyed, node id = 3, client id = [3:7439876900683381918:2846], server id = [4:7439876902546877836:2611], tablet id = 72075186224037897 2024-11-21T23:22:07.758771Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7439876900683381921:2849] 2024-11-21T23:22:07.758792Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7439876900683381921:2849] 2024-11-21T23:22:07.822734Z node 3 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [3:7439876900683381918:2846], schemeshard count = 1 Trying to start YDB, gRPC: 11089, MsgBus: 18060 2024-11-21T23:22:09.881646Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7439876910945185959:2196];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/002848/r3tmp/tmpnRwkKe/pdisk_1.dat 2024-11-21T23:22:10.041743Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2024-11-21T23:22:10.144576Z node 5 :IMPORT WARN: Table profiles were 
not loaded 2024-11-21T23:22:10.191335Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:10.191452Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:10.194788Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11089, node 5 2024-11-21T23:22:10.399127Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:22:10.399161Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:22:10.399173Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:22:10.399312Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18060 TClient is connected to server localhost:18060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:11.510383Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:22:11.539808Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:22:11.657141Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2024-11-21T23:22:11.908131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2024-11-21T23:22:12.044410Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:22:14.785285Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876932420023991:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:22:14.785381Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:22:14.868133Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7439876910945185959:2196];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:22:14.874714Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:22:14.879182Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:22:14.961972Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:22:15.054631Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:22:15.210211Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:22:15.300653Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:22:15.388439Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:22:15.519513Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876936714991803:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:22:15.519627Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:22:15.519887Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7439876936714991808:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:22:15.524773Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:22:15.565223Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:22:15.568485Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876936714991810:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1}],"GroupBy":"item.App","Aggregation":"{_yql_agg_0: MAX(item.Message,state._yql_agg_0),_yql_agg_1: MIN(item.Message,state._yql_agg_1)}","Name":"Aggregate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 And item.Ts \u003C= 4 Or item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["App"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"columns":["App","Message","Ts"],"scan_by":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 And item.Ts \u003C= 4 Or item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.App","Aggregation":"{_yql_agg_0: MAX(item.Message,state._yql_agg_0),_yql_agg_1: MIN(item.Message,state._yql_agg_1)}","Name":"Aggregate"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-StreamLookupJoin+ColumnStore |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/tstool/tstool |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/tstool/tstool |88.6%| [LD] {RESULT} $(B)/ydb/tools/tstool/tstool >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small+StreamLookupJoin-ColumnStore >> KqpJoinOrder::TPCDS92+StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS92-StreamLookupJoin+ColumnStore |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |88.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped |88.6%| [LD] {default-linux-x86_64, release, 
asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |88.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD] >> KqpJoinOrder::CanonizedJoinOrderLookupBug >> TSubDomainTest::UserAttributes >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2024-11-21T23:22:13.618073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876926758846336:2187];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:22:13.619137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019a0/r3tmp/tmpD02Kmf/pdisk_1.dat 2024-11-21T23:22:14.264783Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:14.310218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:14.314714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:14.323994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5623 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:15.039248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
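The query plan JSON printed above (tables: /Root/Logs) describes a parallel full scan of the Logs table reading App, Message and Ts, a filter with the predicate Ts > 1 And Ts <= 4 Or App == "ydb", and a per-App aggregate computing MAX(Message) and MIN(Message). The literal statement the test executed is not shown in the log, so the YQL below is only a reconstruction that is consistent with that plan, with illustrative column aliases.
-- Sketch reconstructed from the plan JSON above; aliases are illustrative only.
SELECT
    App,
    MIN(Message) AS min_message,  -- _yql_agg_1 in the plan
    MAX(Message) AS max_message   -- _yql_agg_0 in the plan
FROM `/Root/Logs`
WHERE (Ts > 1 AND Ts <= 4) OR App == "ydb"
GROUP BY App;
The GROUP BY is what introduces the HashShuffle connection keyed on App, and the ReadRanges entries covering (-∞, +∞) for every key column match the TableFullScan node reported by the optimizer.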
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019a0/r3tmp/tmp5zNyq0/pdisk_1.dat 2024-11-21T23:22:19.358414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:22:19.406384Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:19.425657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:19.425763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:19.427387Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:19.904935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
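The repeated KQP_WORKLOAD_SERVICE warnings earlier in this section ("Resource pool default not found or you don't have access permissions", followed by an ESchemeOpCreateResourcePool operation and a "completed, doublechecking" retry) appear to be the first-use path: the workload service looks up the default pool, finds nothing, creates it, and retries. For orientation only, creating a resource pool explicitly uses DDL along the lines of the sketch below; the pool name, option names and values here are assumptions for illustration and are not taken from this log.
-- Illustrative only: pool name and option names/values are assumptions, not from the log.
CREATE RESOURCE POOL test_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);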
2024-11-21T23:22:19.912054Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |88.7%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming >> TSubDomainTest::LsAltered [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231891.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231891.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=132231891.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231891.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231891.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112231891.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=132231891.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=132231891.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=112230691.000000s;Name=;Codec=}; 2024-11-21T23:21:31.982559Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:21:32.104520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:21:32.144631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:21:32.144721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:21:32.145020Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:21:32.155774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:21:32.156048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2024-11-21T23:21:32.156330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:21:32.156445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:21:32.156541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:21:32.156653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:21:32.156752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:21:32.156865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:21:32.157014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:21:32.157114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:21:32.157209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:21:32.157300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:21:32.179630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:21:32.179742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:21:32.194785Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:21:32.195129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:21:32.195195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:21:32.195403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 
2024-11-21T23:21:32.195616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:21:32.195701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:21:32.195767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:21:32.195857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:21:32.195936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:21:32.195986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:21:32.196025Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:21:32.196204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:32.196266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:21:32.196310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:21:32.196343Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:21:32.196434Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:21:32.196488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:21:32.196531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:21:32.196565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:21:32.196639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:21:32.196678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:21:32.196707Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2024-11-21T23:21:32.196757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:21:32.196811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:21:32.196840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:21:32.197088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=94; 2024-11-21T23:21:32.197183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2024-11-21T23:21:32.197268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2024-11-21T23:21:32.197357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2024-11-21T23:21:32.197542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:21:32.197603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:21:32.197638Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:21:32.197849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abst ... 
ollerLoadingTime=79; 2024-11-21T23:22:25.645357Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:22:25.645419Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=123; 2024-11-21T23:22:25.645529Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=48; 2024-11-21T23:22:25.645607Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=35; 2024-11-21T23:22:25.645829Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=188; 2024-11-21T23:22:25.646444Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=569; 2024-11-21T23:22:25.646568Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=40; 2024-11-21T23:22:25.646644Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=38; 2024-11-21T23:22:25.646681Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2024-11-21T23:22:25.646734Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2024-11-21T23:22:25.646854Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=89; 2024-11-21T23:22:25.646914Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=29; 2024-11-21T23:22:25.646952Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2024-11-21T23:22:25.647044Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2024-11-21T23:22:25.647086Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2024-11-21T23:22:25.647160Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2024-11-21T23:22:25.647193Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=33112; 2024-11-21T23:22:25.647308Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=15;blobs=30;rows=400000;bytes=23741012;raw_bytes=40211545; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:22:25.647418Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:22:25.647455Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];process=SwitchToWork;fline=columnshard_impl.cpp:1558;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T23:22:25.647514Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:22:25.647630Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T23:22:25.647719Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T23:22:25.647829Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:22:25.647862Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:22:25.647928Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:22:25.647967Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:22:25.648022Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:25.648083Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T23:22:25.648154Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:22:25.648191Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:22:25.648230Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:25.648280Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:25.648317Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:22:25.648395Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:22:25.649057Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:25.649151Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1490:3388];tablet_id=9437184;parent=[1:1441:3346];fline=manager.h:99;event=ask_data;request=request_id=109;1={portions_count=15};; 2024-11-21T23:22:25.649961Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:22:25.650320Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:22:25.650350Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T23:22:25.650373Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:22:25.650487Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:22:25.650540Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:25.650589Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=4; 2024-11-21T23:22:25.650662Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700011;tx_id=18446744073709551615;;current_snapshot_ts=1000000005; 2024-11-21T23:22:25.650705Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:22:25.650745Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:25.650804Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:25.650846Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:22:25.650910Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:22:25.655336Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=15;path_id=1; 2024-11-21T23:22:25.656284Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=15;path_id=1; 2024-11-21T23:22:25.657504Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:22:25.657551Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1441:3346];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'tier1' stopped at tablet 9437184 240000/14241316 160000/9495672 80000/4750028 0/0 >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore [GOOD] >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2024-11-21T23:22:14.333232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876929525353635:2238];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:22:14.333351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019dd/r3tmp/tmpni96N5/pdisk_1.dat 2024-11-21T23:22:15.247377Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:15.278757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:15.278876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:15.317646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18815 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:15.939801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:22:15.990831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/0019dd/r3tmp/tmp4ifURu/pdisk_1.dat 2024-11-21T23:22:21.899799Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2024-11-21T23:22:21.977589Z node 2 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:22.035664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:22.035771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:22.043994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:22:22.400253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:22:22.410733Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSubDomainTest::ConsistentCopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2024-11-21T23:22:16.877950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876939208321234:2189];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:22:16.878053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/001228/r3tmp/tmpy9Fh0F/pdisk_1.dat 2024-11-21T23:22:17.425266Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:17.429231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:22:17.429381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:22:17.433275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17274 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2024-11-21T23:22:17.681204Z node 1 :TX_PROXY DEBUG: actor# [1:7439876939208321327:2102] Handle TEvNavigate describe path dc-1 2024-11-21T23:22:17.681282Z node 1 :TX_PROXY DEBUG: Actor# [1:7439876943503288898:2254] HANDLE EvNavigateScheme dc-1 2024-11-21T23:22:17.682252Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7439876943503288646:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:22:17.682330Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7439876943503288646:2116], path# /dc-1, domainOwnerId# 72057594046644480 2024-11-21T23:22:17.682630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2024-11-21T23:22:17.684706Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321041:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439876943503288903:2255] 2024-11-21T23:22:17.684766Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439876939208321041:2049] Subscribe: subscriber# [1:7439876943503288903:2255], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:22:17.684816Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321047:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439876943503288905:2255] 2024-11-21T23:22:17.684865Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439876939208321047:2055] Subscribe: subscriber# [1:7439876943503288905:2255], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:22:17.684917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288903:2255][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { 
Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439876939208321041:2049] 2024-11-21T23:22:17.684959Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288905:2255][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439876939208321047:2055] 2024-11-21T23:22:17.685002Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439876943503288900:2255] 2024-11-21T23:22:17.685026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439876943503288902:2255] 2024-11-21T23:22:17.685075Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7439876943503288899:2255][/dc-1] Set up state: owner# [1:7439876943503288646:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:22:17.685194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288903:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439876943503288900:2255], cookie# 1 2024-11-21T23:22:17.685207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288904:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439876943503288901:2255], cookie# 1 2024-11-21T23:22:17.685219Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288905:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439876943503288902:2255], cookie# 1 2024-11-21T23:22:17.685272Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321041:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439876943503288903:2255] 2024-11-21T23:22:17.685297Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321041:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439876943503288903:2255], cookie# 1 2024-11-21T23:22:17.685326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321047:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439876943503288905:2255] 2024-11-21T23:22:17.685357Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321047:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439876943503288905:2255], cookie# 1 2024-11-21T23:22:17.686457Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321044:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7439876943503288904:2255] 2024-11-21T23:22:17.686504Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7439876939208321044:2052] Subscribe: subscriber# [1:7439876943503288904:2255], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2024-11-21T23:22:17.686574Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321044:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7439876943503288904:2255], cookie# 1 2024-11-21T23:22:17.686617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288903:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 2 Partial: 0 }: sender# [1:7439876939208321041:2049], cookie# 1 2024-11-21T23:22:17.686632Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288905:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439876939208321047:2055], cookie# 1 2024-11-21T23:22:17.686654Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288904:2255][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439876939208321044:2052] 2024-11-21T23:22:17.686671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7439876943503288904:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439876939208321044:2052], cookie# 1 2024-11-21T23:22:17.686763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439876943503288900:2255], cookie# 1 2024-11-21T23:22:17.686805Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:22:17.686833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439876943503288902:2255], cookie# 1 2024-11-21T23:22:17.686862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:22:17.686910Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7439876943503288901:2255] 2024-11-21T23:22:17.686961Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7439876943503288899:2255][/dc-1] Path was already updated: owner# [1:7439876943503288646:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2024-11-21T23:22:17.686995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7439876943503288901:2255], cookie# 1 2024-11-21T23:22:17.687008Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7439876943503288899:2255][/dc-1] Unexpected sync response: sender# [1:7439876943503288901:2255], cookie# 1 2024-11-21T23:22:17.687040Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7439876939208321044:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7439876943503288904:2255] 2024-11-21T23:22:17.752392Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7439876943503288646:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 } 2024-11-21T23:22:17.753011Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7439876943503288646:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: ... 23] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2024-11-21T23:22:23.410058Z node 2 :TX_PROXY DEBUG: Actor# [2:7439876971078925305:2323] Handle TEvDescribeSchemeResult Forward to# [2:7439876971078925304:2322] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231342926 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231342926 
ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) TClient::Ls request: /dc-1 2024-11-21T23:22:23.411982Z node 2 :TX_PROXY DEBUG: actor# [2:7439876966783957582:2098] Handle TEvNavigate describe path /dc-1 2024-11-21T23:22:23.412020Z node 2 :TX_PROXY DEBUG: Actor# [2:7439876971078925308:2326] HANDLE EvNavigateScheme /dc-1 2024-11-21T23:22:23.412120Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7439876966783957635:2112], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2024-11-21T23:22:23.412227Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439876966783957889:2253][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7439876966783957635:2112], cookie# 4 2024-11-21T23:22:23.412288Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439876966783957893:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439876966783957890:2253], cookie# 4 2024-11-21T23:22:23.412318Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439876966783957894:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439876966783957891:2253], cookie# 4 2024-11-21T23:22:23.412344Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439876966783957895:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439876966783957892:2253], cookie# 4 2024-11-21T23:22:23.412370Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439876966783957339:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439876966783957893:2253], cookie# 4 2024-11-21T23:22:23.412403Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439876966783957342:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439876966783957894:2253], cookie# 4 2024-11-21T23:22:23.412428Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7439876966783957345:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7439876966783957895:2253], cookie# 4 2024-11-21T23:22:23.412462Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439876966783957893:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439876966783957339:2049], cookie# 4 2024-11-21T23:22:23.412485Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439876966783957894:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439876966783957342:2052], cookie# 4 2024-11-21T23:22:23.412499Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7439876966783957895:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439876966783957345:2055], cookie# 4 
2024-11-21T23:22:23.412526Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439876966783957889:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439876966783957890:2253], cookie# 4 2024-11-21T23:22:23.412548Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439876966783957889:2253][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2024-11-21T23:22:23.412565Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439876966783957889:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439876966783957891:2253], cookie# 4 2024-11-21T23:22:23.412583Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439876966783957889:2253][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2024-11-21T23:22:23.412603Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439876966783957889:2253][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [2:7439876966783957892:2253], cookie# 4 2024-11-21T23:22:23.412616Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7439876966783957889:2253][/dc-1] Unexpected sync response: sender# [2:7439876966783957892:2253], cookie# 4 2024-11-21T23:22:23.412653Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7439876966783957635:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2024-11-21T23:22:23.412725Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7439876966783957635:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7439876966783957889:2253] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732231342898 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2024-11-21T23:22:23.412806Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7439876966783957635:2112], cacheItem# { Subscriber: { Subscriber: [2:7439876966783957889:2253] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1732231342898 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2024-11-21T23:22:23.412936Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7439876971078925309:2327], recipient# [2:7439876971078925308:2326], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: 
(empty maybe) } }] } 2024-11-21T23:22:23.412975Z node 2 :TX_PROXY DEBUG: Actor# [2:7439876971078925308:2326] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2024-11-21T23:22:23.413028Z node 2 :TX_PROXY DEBUG: Actor# [2:7439876971078925308:2326] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2024-11-21T23:22:23.413519Z node 2 :TX_PROXY DEBUG: Actor# [2:7439876971078925308:2326] Handle TEvDescribeSchemeResult Forward to# [2:7439876971078925307:2325] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231342898 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1732231342898 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1732231342926 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594... (TRUNCATED) >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf |88.7%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::TPCDS23-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::TPCDS23+StreamLookupJoin-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees+StreamLookupJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12891, MsgBus: 1999 2024-11-21T23:20:29.823398Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7439876480153147956:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:29.823455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/dl5p/000e8f/r3tmp/tmpdcMm9D/pdisk_1.dat 2024-11-21T23:20:30.405190Z node 1 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:20:30.420633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2024-11-21T23:20:30.420729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2024-11-21T23:20:30.426460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12891, node 1 2024-11-21T23:20:30.512465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2024-11-21T23:20:30.512493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2024-11-21T23:20:30.512508Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2024-11-21T23:20:30.512612Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1999 TClient is connected to server localhost:1999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coo... (TRUNCATED) WaitRootIsUp 'Root' success. 2024-11-21T23:20:31.074520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:31.092517Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2024-11-21T23:20:31.107524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2024-11-21T23:20:31.234648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:31.380957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:31.472208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2024-11-21T23:20:33.882561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876497333018783:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:33.897769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:33.932957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2024-11-21T23:20:33.979441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.024103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.076373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.115765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.186480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2024-11-21T23:20:34.251369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876501627986581:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.251437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.251764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7439876501627986586:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2024-11-21T23:20:34.255520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:20:34.267194Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2024-11-21T23:20:34.267493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7439876501627986588:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:20:34.867267Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7439876480153147956:2118];send_to=[0:7307199536658146131:7762515]; 2024-11-21T23:20:34.867414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2024-11-21T23:20:36.060665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.108261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.180616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.234461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.265339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.438231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.485869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.543621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.611362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.669519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.710282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.804964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T23:20:36.852522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.330641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T23:20:37.375122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.401103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.437601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:20:37.489170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 720 ... access permissions } 2024-11-21T23:22:14.428089Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2024-11-21T23:22:14.449177Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7439876929636385815:2438], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2024-11-21T23:22:17.337585Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2024-11-21T23:22:17.410629Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2024-11-21T23:22:17.525811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2024-11-21T23:22:17.653743Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2024-11-21T23:22:17.798330Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2024-11-21T23:22:18.155664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2024-11-21T23:22:18.255080Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2024-11-21T23:22:18.322273Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2024-11-21T23:22:18.385648Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2024-11-21T23:22:18.450641Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2024-11-21T23:22:18.529522Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2024-11-21T23:22:18.589276Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2024-11-21T23:22:18.661480Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2024-11-21T23:22:19.860181Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:2, at schemeshard: 72057594046644480 2024-11-21T23:22:19.957461Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.084232Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.193996Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.280474Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.368783Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.458579Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.534814Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.594564Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.718988Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.877537Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2024-11-21T23:22:20.984827Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.162826Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.291263Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.391717Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.503543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.611503Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.724542Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.795362Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710702:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.863345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480 2024-11-21T23:22:21.969994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2024-11-21T23:22:22.085005Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2024-11-21T23:22:22.184251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480 2024-11-21T23:22:22.256334Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 2024-11-21T23:22:22.282756Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2024-11-21T23:22:22.282781Z node 5 :IMPORT WARN: Table profiles were not loaded 2024-11-21T23:22:22.736205Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:1, at schemeshard: 72057594046644480 2024-11-21T23:22:22.824779Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2024-11-21T23:22:22.946941Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2024-11-21T23:22:23.062967Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2024-11-21T23:22:23.228953Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2024-11-21T23:22:23.344474Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 2024-11-21T23:22:23.471394Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2024-11-21T23:22:23.573029Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710715:0, at schemeshard: 72057594046644480 2024-11-21T23:22:23.674020Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2024-11-21T23:22:23.944651Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710717:0, at schemeshard: 72057594046644480 2024-11-21T23:22:24.041671Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710718:0, at schemeshard: 72057594046644480 2024-11-21T23:22:24.154454Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 2024-11-21T23:22:24.228753Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin-ColumnStore [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS78-StreamLookupJoin+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:69;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:103;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=132231883.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=132231883.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112231883.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112231883.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=112230683.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=112230683.000000s;Name=;Codec=}; 2024-11-21T23:21:25.675137Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2024-11-21T23:21:25.889984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:89;event=initialize_shard;step=OnActivateExecutor; 2024-11-21T23:21:25.951605Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:95;event=tiering_new_event; 2024-11-21T23:21:25.951725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];fline=columnshard.cpp:105;event=initialize_shard;step=initialize_tiring_finished; 2024-11-21T23:21:25.952058Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2024-11-21T23:21:25.962248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2024-11-21T23:21:25.962544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2024-11-21T23:21:25.962846Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2024-11-21T23:21:25.962969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2024-11-21T23:21:25.963077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2024-11-21T23:21:25.963213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2024-11-21T23:21:25.963336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2024-11-21T23:21:25.963463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2024-11-21T23:21:25.963608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2024-11-21T23:21:25.963737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2024-11-21T23:21:25.963845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2024-11-21T23:21:25.963962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:136:2168];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2024-11-21T23:21:25.996899Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2024-11-21T23:21:25.997014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:136:2168];process=Enqueue;ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=controllers.cpp:10;event=OnTieringModified;count=1; 2024-11-21T23:21:26.026967Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2024-11-21T23:21:26.027363Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2024-11-21T23:21:26.027424Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2024-11-21T23:21:26.027676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:26.027881Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2024-11-21T23:21:26.027981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2024-11-21T23:21:26.028024Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2024-11-21T23:21:26.028114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2024-11-21T23:21:26.028182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2024-11-21T23:21:26.028251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2024-11-21T23:21:26.028294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2024-11-21T23:21:26.028485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2024-11-21T23:21:26.028551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2024-11-21T23:21:26.028597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2024-11-21T23:21:26.028640Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2024-11-21T23:21:26.028730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2024-11-21T23:21:26.028785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2024-11-21T23:21:26.028830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2024-11-21T23:21:26.028862Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2024-11-21T23:21:26.028939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2024-11-21T23:21:26.028978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2024-11-21T23:21:26.029011Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2024-11-21T23:21:26.029063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2024-11-21T23:21:26.029109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2024-11-21T23:21:26.029140Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2024-11-21T23:21:26.029373Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=69; 2024-11-21T23:21:26.029467Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2024-11-21T23:21:26.029557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2024-11-21T23:21:26.029649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2024-11-21T23:21:26.029834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2024-11-21T23:21:26.029910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2024-11-21T23:21:26.029954Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2024-11-21T23:21:26.030171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2024-11-21T23:21:26.030215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2024-11-21T23:21:26.030248Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2024-11-21T23:21:26.034678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_f ... 
t_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=74; 2024-11-21T23:22:31.174551Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tx_controller;fline=tx_controller.cpp:89;override=0;no_dl=0;dl=0;operators=0;plan=0;dl_queue=0; 2024-11-21T23:22:31.174593Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=97; 2024-11-21T23:22:31.174697Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=66; 2024-11-21T23:22:31.174782Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2024-11-21T23:22:31.174966Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=144; 2024-11-21T23:22:31.175388Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=386; 2024-11-21T23:22:31.175464Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=34; 2024-11-21T23:22:31.175530Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=32; 2024-11-21T23:22:31.175574Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2024-11-21T23:22:31.175613Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2024-11-21T23:22:31.175650Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2024-11-21T23:22:31.175713Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2024-11-21T23:22:31.175750Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2024-11-21T23:22:31.175855Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=71; 2024-11-21T23:22:31.175900Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2024-11-21T23:22:31.175958Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 2024-11-21T23:22:31.175988Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12666; 2024-11-21T23:22:31.176108Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; s-compacted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; inactive portions=6;blobs=12;rows=160000;bytes=9495312;raw_bytes=16084618; evicted portions=0;blobs=0;rows=0;bytes=0;raw_bytes=0; at tablet 9437184 2024-11-21T23:22:31.176213Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard.cpp:61;event=initialize_shard;step=SwitchToWork; 2024-11-21T23:22:31.176254Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard_impl.cpp:1558;event=activate_tiering;path_id=1;tiering=Tiering1; 2024-11-21T23:22:31.176294Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard_impl.cpp:1582;event=OnTieringModified;path_id=1; 2024-11-21T23:22:31.176383Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=manager.cpp:204;path_id=1;tiering_name=Tiering1;event=activation; 2024-11-21T23:22:31.176455Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=column_engine_logs.cpp:559;event=OnTieringModified;new_count_tierings=1;new_count_ttls=1; 2024-11-21T23:22:31.176529Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard.cpp:69;event=initialize_shard;step=SignalTabletActive; 2024-11-21T23:22:31.176562Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];process=SwitchToWork;fline=columnshard__progress_tx.cpp:107;event=EnqueueProgressTx;tablet_id=9437184; 2024-11-21T23:22:31.176627Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;fline=column_engine.cpp:27;total=270443335680;kff=0.3; 2024-11-21T23:22:31.176662Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:22:31.176712Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:31.176778Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T23:22:31.176834Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T23:22:31.176870Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:22:31.176912Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:31.176956Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 
2024-11-21T23:22:31.176993Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:22:31.177082Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:22:31.177597Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:31.177675Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1188:3084];tablet_id=9437184;parent=[1:1151:3054];fline=manager.h:99;event=ask_data;request=request_id=79;1={portions_count=6};; 2024-11-21T23:22:31.178328Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:222;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2024-11-21T23:22:31.179033Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:211;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2024-11-21T23:22:31.179068Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2024-11-21T23:22:31.179115Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2024-11-21T23:22:31.179154Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:510;event=EnqueueBackgroundActivities;periodic=0; 2024-11-21T23:22:31.179220Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:756;event=start_indexation_tasks;insert_overload_size=0; 2024-11-21T23:22:31.179274Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:328;event=StartCleanup;portions_count=2; 2024-11-21T23:22:31.179344Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:368;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000007; 2024-11-21T23:22:31.179387Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:401;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0; 2024-11-21T23:22:31.179436Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1030;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:31.179475Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1062;background=cleanup;skip_reason=no_changes; 2024-11-21T23:22:31.179521Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:414;event=StartTtl;external=0; 2024-11-21T23:22:31.179600Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:972;background=ttl;skip_reason=no_changes; 2024-11-21T23:22:31.182812Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1398;event=TTxAskPortionChunks::Execute;size=6;path_id=1; 2024-11-21T23:22:31.183267Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1419;event=TTxAskPortionChunks::Execute;stage=processing;size=6;path_id=1; 2024-11-21T23:22:31.183816Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1453;event=TTxAskPortionChunks::Execute;stage=finished;size=0;path_id=1; 2024-11-21T23:22:31.183877Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1151:3054];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;fline=columnshard_impl.cpp:1457;event=TTxAskPortionChunks::Execute;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:91 :Tier 'cold' stopped at tablet 9437184 160000/9495312 160000/9495312 80000/4749668 0/0 >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable